From becd08da24df2af93eee28053d32929298cdccbd Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 18 Oct 2024 09:25:58 -0400 Subject: [PATCH 01/67] Close exchanges in HttpClientTests (#115059) --- .../java/org/elasticsearch/ingest/geoip/HttpClientTests.java | 2 ++ muted-tests.yml | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java index 43ed96afb07e4..f4a3cfbde4f4c 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java @@ -47,6 +47,7 @@ public static void startServer() throws Throwable { server.createContext("/404/", exchange -> { try { exchange.sendResponseHeaders(404, 0); + exchange.close(); } catch (Exception e) { fail(e); } @@ -102,6 +103,7 @@ public boolean checkCredentials(String username, String password) { exchange.getResponseHeaders().add("Location", "/" + destination + "/"); } exchange.sendResponseHeaders(302, 0); + exchange.close(); } catch (Exception e) { fail(e); } diff --git a/muted-tests.yml b/muted-tests.yml index dcd70ad3fb83c..821a96217d05c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -318,8 +318,6 @@ tests: - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testNoStream issue: https://github.com/elastic/elasticsearch/issues/114788 -- class: org.elasticsearch.ingest.geoip.HttpClientTests - issue: https://github.com/elastic/elasticsearch/issues/112618 - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT method: testTracingCrossCluster issue: https://github.com/elastic/elasticsearch/issues/112731 From 906bf46ee8804cda18ffc0be2fbe410f963a860a Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 18 Oct 2024 16:20:43 +0200 Subject: 
[PATCH 02/67] Cannot skip tests named "values" (#115096) When trying to use a gradle `skipTest` rule on tests named "values", we get a class-caste exception in Jackson. This PR needs to rename this function for all versions of Elasticsearch that the `yamlRestCompatTestTransform` task will run on, so that later PRs that add skipTests will be able to pass. Since this test was added in 8.14, we must backport all the way back to there. ``` class com.fasterxml.jackson.databind.node.IntNode cannot be cast to class com.fasterxml.jackson.databind.node.ArrayNode (com.fasterxml.jackson.databind.node.IntNode and com.fasterxml.jackson.databind.node.ArrayNode are in unnamed module of loader org.gradle.internal.classloader.VisitableURLClassLoader$InstrumentingVisitableURLClassLoader @50337c96) ``` --- .../yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 939f153b8b0ea..88ef03a22d70c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -563,7 +563,7 @@ setup: - match: { values.1.0: "Payroll Specialist" } --- -values: +"values function": - requires: cluster_features: esql.agg_values reason: "values is available in 8.14+" From e927aaaa0b28645e6eff2dc36b3fd8cecd578f13 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Fri, 18 Oct 2024 17:00:10 +0200 Subject: [PATCH 03/67] Fix ML autoscaling (classic cloud) for models with zero allocations (#115082) * Fix ML autoscaling (classic cloud) for models with zero allocations * refactor a bit --- .../ml/autoscaling/MlAutoscalingContext.java | 2 +- .../MlAutoscalingDeciderServiceTests.java | 69 +++++++++++++++++++ 2 files changed, 70 
insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java index cca59f27d5c76..f266dda6e3e5d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java @@ -177,7 +177,7 @@ public boolean isEmpty() { return anomalyDetectionTasks.isEmpty() && snapshotUpgradeTasks.isEmpty() && dataframeAnalyticsTasks.isEmpty() - && modelAssignments.isEmpty(); + && modelAssignments.values().stream().allMatch(assignment -> assignment.totalTargetAllocations() == 0); } public List findPartiallyAllocatedModels() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java index 632730bc7f141..a1db31c474f31 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java @@ -29,6 +29,12 @@ import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderContext; import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderResult; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; +import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import 
org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.NodeLoad; @@ -262,6 +268,69 @@ public void testScale_GivenUndeterminedMemory_ShouldReturnNullCapacity() { assertThat(result.requiredCapacity(), is(nullValue())); } + public void testScale_GivenModelWithZeroAllocations() { + MlAutoscalingDeciderService service = buildService(); + service.onMaster(); + + ClusterState clusterState = new ClusterState.Builder(new ClusterName("cluster")).metadata( + Metadata.builder() + .putCustom( + TrainedModelAssignmentMetadata.NAME, + new TrainedModelAssignmentMetadata( + Map.of( + "model-with-zero-allocations", + TrainedModelAssignment.Builder.empty( + new StartTrainedModelDeploymentAction.TaskParams( + "model-with-zero-allocations", + "model-with-zero-allocations-deployment", + 400, + 0, + 2, + 100, + null, + Priority.NORMAL, + 0L, + 0L + ), + new AdaptiveAllocationsSettings(true, 0, 4) + ).setAssignmentState(AssignmentState.STARTED).build() + ) + ) + ) + .build() + ).nodes(DiscoveryNodes.builder().add(buildNode("ml-node", ByteSizeValue.ofGb(4), 8)).build()).build(); + + AutoscalingDeciderResult result = service.scale( + Settings.EMPTY, + new DeciderContext( + clusterState, + new AutoscalingCapacity( + new AutoscalingCapacity.AutoscalingResources(null, ByteSizeValue.ofGb(4), null), + new AutoscalingCapacity.AutoscalingResources(null, ByteSizeValue.ofGb(4), null) + ) + ) + ); + // First call doesn't downscale as delay has not been satisfied + assertThat(result.reason().summary(), containsString("down scale delay has not been satisfied")); + + // Let's move time forward 1 hour + timeSupplier.setOffset(TimeValue.timeValueHours(1)); + + result = service.scale( + Settings.EMPTY, + new DeciderContext( + clusterState, + new AutoscalingCapacity( + new AutoscalingCapacity.AutoscalingResources(null, 
ByteSizeValue.ofGb(4), null), + new AutoscalingCapacity.AutoscalingResources(null, ByteSizeValue.ofGb(4), null) + ) + ) + ); + assertThat(result.reason().summary(), equalTo("Requesting scale down as tier and/or node size could be smaller")); + assertThat(result.requiredCapacity().total().memory().getBytes(), equalTo(0L)); + assertThat(result.requiredCapacity().node().memory().getBytes(), equalTo(0L)); + } + private DiscoveryNode buildNode(String id, ByteSizeValue machineMemory, int allocatedProcessors) { return DiscoveryNodeUtils.create( id, From d0c8ff59328db1265c2e77c8791aed0382fc2425 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 18 Oct 2024 08:01:04 -0700 Subject: [PATCH 04/67] Refactor TSDB doc_values util allow introduce new codec (#115042) This PR refactors the doc_values utils used in the TSDB codec to allow sharing between the current codec and the new codec. --- .../AbstractDocValuesForUtilBenchmark.java | 2 +- .../codec/tsdb/internal/DecodeBenchmark.java | 3 +- .../index/codec/tsdb/DocValuesForUtil.java | 10 +++-- .../codec/tsdb/ES87TSDBDocValuesConsumer.java | 2 +- .../codec/tsdb/ES87TSDBDocValuesProducer.java | 6 +-- ...Encoder.java => TSDBDocValuesEncoder.java} | 44 ++++++++++--------- .../codec/tsdb/DocValuesForUtilTests.java | 26 +++++------ .../tsdb/ES87TSDBDocValuesEncoderTests.java | 4 +- 8 files changed, 49 insertions(+), 48 deletions(-) rename server/src/main/java/org/elasticsearch/index/codec/tsdb/{ES87TSDBDocValuesEncoder.java => TSDBDocValuesEncoder.java} (89%) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java index 58b1d2455a7a6..53723f05728b5 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java +++ 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/AbstractDocValuesForUtilBenchmark.java @@ -21,7 +21,7 @@ public abstract class AbstractDocValuesForUtilBenchmark { protected final int blockSize; public AbstractDocValuesForUtilBenchmark() { - this.forUtil = new DocValuesForUtil(); + this.forUtil = new DocValuesForUtil(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); this.blockSize = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java index b8f0a11e21c8f..284324b3d9206 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java @@ -12,7 +12,6 @@ import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataOutput; -import org.elasticsearch.index.codec.tsdb.DocValuesForUtil; import org.openjdk.jmh.infra.Blackhole; import java.io.IOException; @@ -44,7 +43,7 @@ public void setupInvocation(int bitsPerValue) { @Override public void benchmark(int bitsPerValue, Blackhole bh) throws IOException { - DocValuesForUtil.decode(bitsPerValue, this.dataInput, this.output); + forUtil.decode(bitsPerValue, this.dataInput, this.output); bh.consume(this.output); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java index 648913098ff0d..db9c352ee30f8 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java @@ -21,10 +21,12 @@ public class DocValuesForUtil { private static final int 
BITS_IN_FIVE_BYTES = 5 * Byte.SIZE; private static final int BITS_IN_SIX_BYTES = 6 * Byte.SIZE; private static final int BITS_IN_SEVEN_BYTES = 7 * Byte.SIZE; - private static final int blockSize = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + private final int blockSize; private final byte[] encoded = new byte[1024]; - public DocValuesForUtil() {} + public DocValuesForUtil(int numericBlockSize) { + this.blockSize = numericBlockSize; + } public static int roundBits(int bitsPerValue) { if (bitsPerValue > 24 && bitsPerValue <= 32) { @@ -67,7 +69,7 @@ private void encodeFiveSixOrSevenBytesPerValue(long[] in, int bitsPerValue, fina out.writeBytes(this.encoded, bytesPerValue * in.length); } - public static void decode(int bitsPerValue, final DataInput in, long[] out) throws IOException { + public void decode(int bitsPerValue, final DataInput in, long[] out) throws IOException { if (bitsPerValue <= 24) { ForUtil.decode(bitsPerValue, in, out); } else if (bitsPerValue <= 32) { @@ -81,7 +83,7 @@ public static void decode(int bitsPerValue, final DataInput in, long[] out) thro } } - private static void decodeFiveSixOrSevenBytesPerValue(int bitsPerValue, final DataInput in, long[] out) throws IOException { + private void decodeFiveSixOrSevenBytesPerValue(int bitsPerValue, final DataInput in, long[] out) throws IOException { // NOTE: we expect multibyte values to be written "least significant byte" first int bytesPerValue = bitsPerValue / Byte.SIZE; long mask = (1L << bitsPerValue) - 1; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 71d9768ac5ff7..5d79807fe6674 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -144,7 +144,7 @@ private long[] writeField(FieldInfo field, DocValuesProducer 
valuesProducer, lon if (maxOrd != 1) { final long[] buffer = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; int bufferSize = 0; - final ES87TSDBDocValuesEncoder encoder = new ES87TSDBDocValuesEncoder(); + final TSDBDocValuesEncoder encoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); values = valuesProducer.getSortedNumeric(field); final int bitsPerOrd = maxOrd >= 0 ? PackedInts.bitsRequired(maxOrd - 1) : -1; for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index e3c2daddba80e..e3f7e829c1d2e 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -965,7 +965,7 @@ public long longValue() { private final int maxDoc = ES87TSDBDocValuesProducer.this.maxDoc; private int doc = -1; - private final ES87TSDBDocValuesEncoder decoder = new ES87TSDBDocValuesEncoder(); + private final TSDBDocValuesEncoder decoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); private long currentBlockIndex = -1; private final long[] currentBlock = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; @@ -1030,7 +1030,7 @@ public long longValue() throws IOException { ); return new NumericDocValues() { - private final ES87TSDBDocValuesEncoder decoder = new ES87TSDBDocValuesEncoder(); + private final TSDBDocValuesEncoder decoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); private long currentBlockIndex = -1; private final long[] currentBlock = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; @@ -1092,7 +1092,7 @@ private NumericValues getValues(NumericEntry entry, final long maxOrd) throws IO final int bitsPerOrd = maxOrd >= 0 ? 
PackedInts.bitsRequired(maxOrd - 1) : -1; return new NumericValues() { - private final ES87TSDBDocValuesEncoder decoder = new ES87TSDBDocValuesEncoder(); + private final TSDBDocValuesEncoder decoder = new TSDBDocValuesEncoder(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); private long currentBlockIndex = -1; private final long[] currentBlock = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/TSDBDocValuesEncoder.java similarity index 89% rename from server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java rename to server/src/main/java/org/elasticsearch/index/codec/tsdb/TSDBDocValuesEncoder.java index 4e95ce34dc410..3af9d726af4fc 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/TSDBDocValuesEncoder.java @@ -44,8 +44,8 @@ * * * - * Notice that encoding and decoding are written in a nested way, for instance {@link ES87TSDBDocValuesEncoder#deltaEncode} calling - * {@link ES87TSDBDocValuesEncoder#removeOffset} and so on. This allows us to easily introduce new encoding schemes or remove existing + * Notice that encoding and decoding are written in a nested way, for instance {@link TSDBDocValuesEncoder#deltaEncode} calling + * {@link TSDBDocValuesEncoder#removeOffset} and so on. This allows us to easily introduce new encoding schemes or remove existing * (non-effective) encoding schemes in a backward-compatible way. * * A token is used as a bitmask to represent which encoding is applied and allows us to detect the applied encoding scheme at decoding time. @@ -54,11 +54,13 @@ * * Of course, decoding follows the opposite order with respect to encoding. 
*/ -public class ES87TSDBDocValuesEncoder { +public class TSDBDocValuesEncoder { private final DocValuesForUtil forUtil; + private final int numericBlockSize; - public ES87TSDBDocValuesEncoder() { - this.forUtil = new DocValuesForUtil(); + public TSDBDocValuesEncoder(int numericBlockSize) { + this.forUtil = new DocValuesForUtil(numericBlockSize); + this.numericBlockSize = numericBlockSize; } /** @@ -68,7 +70,7 @@ public ES87TSDBDocValuesEncoder() { private void deltaEncode(int token, int tokenBits, long[] in, DataOutput out) throws IOException { int gts = 0; int lts = 0; - for (int i = 1; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + for (int i = 1; i < numericBlockSize; ++i) { if (in[i] > in[i - 1]) { gts++; } else if (in[i] < in[i - 1]) { @@ -79,7 +81,7 @@ private void deltaEncode(int token, int tokenBits, long[] in, DataOutput out) th final boolean doDeltaCompression = (gts == 0 && lts >= 2) || (lts == 0 && gts >= 2); long first = 0; if (doDeltaCompression) { - for (int i = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE - 1; i > 0; --i) { + for (int i = numericBlockSize - 1; i > 0; --i) { in[i] -= in[i - 1]; } // Avoid setting in[0] to 0 in case there is a minimum interval between @@ -115,7 +117,7 @@ private void removeOffset(int token, int tokenBits, long[] in, DataOutput out) t } if (min != 0) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + for (int i = 0; i < numericBlockSize; ++i) { in[i] -= min; } token = (token << 1) | 0x01; @@ -143,7 +145,7 @@ private void gcdEncode(int token, int tokenBits, long[] in, DataOutput out) thro } final boolean doGcdCompression = Long.compareUnsigned(gcd, 1) > 0; if (doGcdCompression) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + for (int i = 0; i < numericBlockSize; ++i) { in[i] /= gcd; } token = (token << 1) | 0x01; @@ -174,7 +176,7 @@ private void forEncode(int token, int tokenBits, long[] in, DataOutput out) thro * Encode the given longs using a combination 
of delta-coding, GCD factorization and bit packing. */ void encode(long[] in, DataOutput out) throws IOException { - assert in.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + assert in.length == numericBlockSize; deltaEncode(0, 0, in, out); } @@ -192,7 +194,7 @@ void encode(long[] in, DataOutput out) throws IOException { * */ void encodeOrdinals(long[] in, DataOutput out, int bitsPerOrd) throws IOException { - assert in.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + assert in.length == numericBlockSize; int numRuns = 1; long firstValue = in[0]; long previousValue = firstValue; @@ -259,7 +261,7 @@ void encodeOrdinals(long[] in, DataOutput out, int bitsPerOrd) throws IOExceptio } void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException { - assert out.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE : out.length; + assert out.length == numericBlockSize : out.length; long v1 = in.readVLong(); int encoding = Long.numberOfTrailingZeros(~v1); @@ -275,7 +277,7 @@ void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException Arrays.fill(out, runLen, out.length, v2); } else if (encoding == 2) { // bit-packed - DocValuesForUtil.decode(bitsPerOrd, in, out); + forUtil.decode(bitsPerOrd, in, out); } else if (encoding == 3) { // cycle encoding int cycleLength = (int) v1; @@ -293,13 +295,13 @@ void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException /** Decode longs that have been encoded with {@link #encode}. 
*/ void decode(DataInput in, long[] out) throws IOException { - assert out.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE : out.length; + assert out.length == numericBlockSize : out.length; final int token = in.readVInt(); final int bitsPerValue = token >>> 3; if (bitsPerValue != 0) { - DocValuesForUtil.decode(bitsPerValue, in, out); + forUtil.decode(bitsPerValue, in, out); } else { Arrays.fill(out, 0L); } @@ -330,21 +332,21 @@ void decode(DataInput in, long[] out) throws IOException { } // this loop should auto-vectorize - private static void mul(long[] arr, long m) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + private void mul(long[] arr, long m) { + for (int i = 0; i < numericBlockSize; ++i) { arr[i] *= m; } } // this loop should auto-vectorize - private static void add(long[] arr, long min) { - for (int i = 0; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + private void add(long[] arr, long min) { + for (int i = 0; i < numericBlockSize; ++i) { arr[i] += min; } } - private static void deltaDecode(long[] arr) { - for (int i = 1; i < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++i) { + private void deltaDecode(long[] arr) { + for (int i = 1; i < numericBlockSize; ++i) { arr[i] += arr[i - 1]; } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java index 7da5463ea46ff..62474113d73d2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java @@ -31,17 +31,18 @@ import java.util.Random; public class DocValuesForUtilTests extends LuceneTestCase { + int NUMERIC_BLOCK_SIZE = 1 << 7; public void testEncodeDecode() throws IOException { final int iterations = RandomNumbers.randomIntBetween(random(), 50, 1000); - final long[] values = new long[iterations * 
ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; + final long[] values = new long[iterations * NUMERIC_BLOCK_SIZE]; final int[] bpvs = new int[iterations]; for (int i = 0; i < iterations; ++i) { final int bpv = TestUtil.nextInt(random(), 1, 64); bpvs[i] = DocValuesForUtil.roundBits(bpv); - for (int j = 0; j < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++j) { - values[i * ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE + j] = bpv == 64 + for (int j = 0; j < NUMERIC_BLOCK_SIZE; ++j) { + values[i * NUMERIC_BLOCK_SIZE + j] = bpv == 64 ? random().nextLong() : TestUtil.nextLong(random(), 0, PackedInts.maxValue(bpv)); } @@ -53,12 +54,12 @@ public void testEncodeDecode() throws IOException { { // encode IndexOutput out = d.createOutput("test.bin", IOContext.DEFAULT); - final DocValuesForUtil forUtil = new DocValuesForUtil(); + final DocValuesForUtil forUtil = new DocValuesForUtil(NUMERIC_BLOCK_SIZE); for (int i = 0; i < iterations; ++i) { - long[] source = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; - for (int j = 0; j < ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; ++j) { - source[j] = values[i * ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE + j]; + long[] source = new long[NUMERIC_BLOCK_SIZE]; + for (int j = 0; j < NUMERIC_BLOCK_SIZE; ++j) { + source[j] = values[i * NUMERIC_BLOCK_SIZE + j]; } out.writeByte((byte) bpvs[i]); forUtil.encode(source, bpvs[i], out); @@ -70,17 +71,14 @@ public void testEncodeDecode() throws IOException { { // decode IndexInput in = d.openInput("test.bin", IOContext.READONCE); - final long[] restored = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; + final DocValuesForUtil forUtil = new DocValuesForUtil(NUMERIC_BLOCK_SIZE); + final long[] restored = new long[NUMERIC_BLOCK_SIZE]; for (int i = 0; i < iterations; ++i) { final int bitsPerValue = in.readByte(); - DocValuesForUtil.decode(bitsPerValue, in, restored); + forUtil.decode(bitsPerValue, in, restored); assertArrayEquals( Arrays.toString(restored), - ArrayUtil.copyOfSubArray( - values, - i 
* ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE, - (i + 1) * ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE - ), + ArrayUtil.copyOfSubArray(values, i * NUMERIC_BLOCK_SIZE, (i + 1) * NUMERIC_BLOCK_SIZE), restored ); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java index 288830276915e..0010c25179b69 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java @@ -23,11 +23,11 @@ public class ES87TSDBDocValuesEncoderTests extends LuceneTestCase { - private final ES87TSDBDocValuesEncoder encoder; + private final TSDBDocValuesEncoder encoder; private final int blockSize = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; public ES87TSDBDocValuesEncoderTests() { - this.encoder = new ES87TSDBDocValuesEncoder(); + this.encoder = new TSDBDocValuesEncoder(blockSize); } public void testRandomValues() throws IOException { From 9050f8df024db01af0d9512e35c615fc18db14fa Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 18 Oct 2024 11:39:08 -0400 Subject: [PATCH 05/67] [CI] Use console=plain so that Buildkite logs aren't a mess (#115049) --- .buildkite/hooks/pre-command | 4 ++-- .ci/scripts/packaging-test.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index a886220c84cda..0ece129a3c238 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -16,10 +16,10 @@ export COMPOSE_HTTP_TIMEOUT JOB_BRANCH="$BUILDKITE_BRANCH" export JOB_BRANCH -GRADLEW="./gradlew --parallel --scan --build-cache --no-watch-fs -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" +GRADLEW="./gradlew --console=plain --parallel --scan --build-cache --no-watch-fs 
-Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" export GRADLEW -GRADLEW_BAT="./gradlew.bat --parallel --scan --build-cache --no-watch-fs -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" +GRADLEW_BAT="./gradlew.bat --console=plain --parallel --scan --build-cache --no-watch-fs -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/" export GRADLEW_BAT export $(cat .ci/java-versions.properties | grep '=' | xargs) diff --git a/.ci/scripts/packaging-test.sh b/.ci/scripts/packaging-test.sh index bb7547933b213..4d84eded8a3ff 100755 --- a/.ci/scripts/packaging-test.sh +++ b/.ci/scripts/packaging-test.sh @@ -78,5 +78,5 @@ sudo -E env \ --unset=JAVA_HOME \ SYSTEM_JAVA_HOME=`readlink -f -n $BUILD_JAVA_HOME` \ DOCKER_CONFIG="${HOME}/.docker" \ - ./gradlew -g $HOME/.gradle --scan --parallel --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ --continue $@ + ./gradlew -g $HOME/.gradle --console=plain --scan --parallel --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ --continue $@ From 156ba2c6d11bb5ee0041e355db5dd51f7d060274 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Fri, 18 Oct 2024 12:25:17 -0400 Subject: [PATCH 06/67] ES|QL per-cluster took time is incorrectly calculated and causes fatal exceptions (#115017) The model for calculating per-cluster `took` times from remote clusters in https://github.com/elastic/elasticsearch/pull/112595 was flawed. It attempted to use Java's System.nanoTime between the local and remote clusters, which is not safe. This results in per-cluster took times that have arbitrary (invalid) values including negative values which cause exceptions to be thrown by the `TimeValue` constructor. (Note: the overall took time calculation was done correctly, so it was the remote per-cluster took times that were flawed.) In this PR, I've done a redesign to address this. 
A key decision of this re-design was whether to always calculate took times only on the querying cluster (bypassing this whole problem) or to continue to allow the remote clusters to calculate their own took times for the remote processing and report that back to the querying cluster via the `ComputeResponse`. I decided in favor of having remote clusters compute their own took times for the remote processing and to additionally track "planning" time (encompassing field-caps and policy enrich remote calls), so that total per-cluster took time is a combination of the two. In _search, remote cluster took times are calculated entirely on the remote cluster, so network time is not included in the per-cluster took times. This has been helpful in diagnosing issues on user environments because if you see an overall took time that is significantly larger than the per cluster took times, that may indicate a network issue, which has happened in diagnosing cross-cluster issues in _search. I moved relative time tracking into `EsqlExecutionInfo`. The "planning time" marker is currently only used in cross-cluster searches, so it will conflict with the INLINESTATS 2 phase model (where planning can be done twice). We will improve this design to handle a 2 phase model in a later ticket, as part of the INLINESTATS work. I tested the current overall took time calculation model with local-only INLINESTATS queries and they work correctly. I also fixed another secondary bug in this PR. If the remote cluster is an older version that does not return took time (and shard info) in the ComputeResponse, the per-cluster took time is then calculated on the querying cluster as a fallback. Finally, I fixed some minor inconsistencies about whether the `_shards` info is shown in the response. 
The rule now is that `_shards` is always shown with 0 shards for SKIPPED clusters, with actual counts for SUCCESSFUL clusters and for remotes running an older version that doesn't report shard stats, the `_shards` field is left out of the XContent response. Fixes https://github.com/elastic/elasticsearch/issues/115022 --- .../esql/action/CrossClustersQueryIT.java | 74 ++++++---- .../xpack/esql/action/EsqlExecutionInfo.java | 49 ++++++- .../xpack/esql/plugin/ComputeListener.java | 84 ++++++++---- .../xpack/esql/plugin/ComputeService.java | 58 ++++---- .../xpack/esql/session/EsqlSession.java | 127 +++++++++--------- .../esql/plugin/ComputeListenerTests.java | 62 ++++++++- .../xpack/esql/session/EsqlSessionTests.java | 45 +++++++ 7 files changed, 331 insertions(+), 168 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index adfa2fc7273cd..ddd5cff014ed2 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -97,7 +97,8 @@ public void testSuccessfulPathways() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -106,6 +107,7 @@ public void testSuccessfulPathways() { assertThat(remoteCluster.getIndexExpression(), 
equalTo("logs-*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -115,6 +117,7 @@ public void testSuccessfulPathways() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -133,7 +136,8 @@ public void testSuccessfulPathways() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -142,6 +146,7 @@ public void testSuccessfulPathways() { assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), 
lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -151,6 +156,7 @@ public void testSuccessfulPathways() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -180,7 +186,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -189,6 +196,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("no_such_index")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); // 0 since no matching index, thus no shards to search assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -198,6 
+206,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -219,7 +228,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -228,6 +238,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(remoteNumShards)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -235,8 +246,9 @@ public void testSearchesWhereMissingIndicesAreSpecified() { EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); assertThat(localCluster.getIndexExpression(), 
equalTo("no_such_index")); - assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(0)); assertThat(localCluster.getSuccessfulShards(), equalTo(0)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -258,7 +270,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -267,6 +280,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("no_such_index1,no_such_index2")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -276,6 +290,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(localCluster.getIndexExpression(), equalTo("no_such_index*,logs-1")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); 
assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -291,7 +306,8 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(true)); - assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); @@ -300,6 +316,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(remoteCluster.getIndexExpression(), equalTo("no_such_index*")); assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -309,6 +326,7 @@ public void testSearchesWhereMissingIndicesAreSpecified() { assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(localCluster.getTotalShards(), equalTo(localNumShards)); assertThat(localCluster.getSuccessfulShards(), 
equalTo(localNumShards)); assertThat(localCluster.getSkippedShards(), equalTo(0)); @@ -414,20 +432,20 @@ public void testCCSExecutionOnSearchesWithLimit0() { assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(remoteCluster.getTotalShards()); - assertNull(remoteCluster.getSuccessfulShards()); - assertNull(remoteCluster.getSkippedShards()); - assertNull(remoteCluster.getFailedShards()); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(localCluster.getTotalShards()); - assertNull(localCluster.getSuccessfulShards()); - assertNull(localCluster.getSkippedShards()); - assertNull(localCluster.getFailedShards()); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); } try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:nomatch* | LIMIT 0", requestIncludeMeta)) { @@ -442,7 +460,8 @@ public void testCCSExecutionOnSearchesWithLimit0() { EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); assertThat(remoteCluster.getIndexExpression(), equalTo("nomatch*")); 
assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); - assertThat(remoteCluster.getTook().millis(), equalTo(0L)); + assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); assertThat(remoteCluster.getTotalShards(), equalTo(0)); assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); assertThat(remoteCluster.getSkippedShards(), equalTo(0)); @@ -453,10 +472,10 @@ public void testCCSExecutionOnSearchesWithLimit0() { assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(localCluster.getTotalShards()); - assertNull(localCluster.getSuccessfulShards()); - assertNull(localCluster.getSkippedShards()); - assertNull(localCluster.getFailedShards()); + assertThat(localCluster.getTotalShards(), equalTo(0)); + assertThat(localCluster.getSuccessfulShards(), equalTo(0)); + assertThat(localCluster.getSkippedShards(), equalTo(0)); + assertThat(localCluster.getFailedShards(), equalTo(0)); } try (EsqlQueryResponse resp = runQuery("FROM nomatch*,cluster-a:* | LIMIT 0", requestIncludeMeta)) { @@ -473,17 +492,20 @@ public void testCCSExecutionOnSearchesWithLimit0() { assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); - assertNull(remoteCluster.getTotalShards()); - assertNull(remoteCluster.getSuccessfulShards()); - assertNull(remoteCluster.getSkippedShards()); - assertNull(remoteCluster.getFailedShards()); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + 
assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); assertThat(localCluster.getIndexExpression(), equalTo("nomatch*")); - // TODO: in https://github.com/elastic/elasticsearch/issues/112886, this will be changed to be SKIPPED - assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index dabccd4ffeb17..aeac14091f378 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -33,6 +33,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Predicate; @@ -55,29 +56,33 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable { public static final ParseField DETAILS_FIELD = new ParseField("details"); public static final ParseField TOOK = new ParseField("took"); - // map key is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query - // the Map itself is immutable after 
construction - all Clusters will be accounted for at the start of the search - // updates to the Cluster occur with the updateCluster method that given the key to map transforms an + // Map key is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query + // The Map itself is immutable after construction - all Clusters will be accounted for at the start of the search. + // Updates to the Cluster occur with the updateCluster method that given the key to map transforms an // old Cluster Object to a new Cluster Object with the remapping function. public final Map clusterInfo; - // not Writeable since it is only needed on the primary CCS coordinator - private final transient Predicate skipUnavailablePredicate; private TimeValue overallTook; - // whether the user has asked for CCS metadata to be in the JSON response (the overall took will always be present) private final boolean includeCCSMetadata; + // fields that are not Writeable since they are only needed on the primary CCS coordinator + private final transient Predicate skipUnavailablePredicate; + private final transient Long relativeStartNanos; // start time for an ESQL query for calculating took times + private transient TimeValue planningTookTime; // time elapsed since start of query to calling ComputeService.execute + public EsqlExecutionInfo(boolean includeCCSMetadata) { this(Predicates.always(), includeCCSMetadata); // default all clusters to skip_unavailable=true } /** * @param skipUnavailablePredicate provide lookup for whether a given cluster has skip_unavailable set to true or false + * @param includeCCSMetadata (user defined setting) whether to include the CCS metadata in the HTTP response */ public EsqlExecutionInfo(Predicate skipUnavailablePredicate, boolean includeCCSMetadata) { this.clusterInfo = ConcurrentCollections.newConcurrentMap(); this.skipUnavailablePredicate = skipUnavailablePredicate; this.includeCCSMetadata = includeCCSMetadata; + this.relativeStartNanos = 
System.nanoTime(); } /** @@ -88,6 +93,7 @@ public EsqlExecutionInfo(Predicate skipUnavailablePredicate, boolean inc this.clusterInfo = clusterInfo; this.includeCCSMetadata = includeCCSMetadata; this.skipUnavailablePredicate = Predicates.always(); + this.relativeStartNanos = null; } public EsqlExecutionInfo(StreamInput in) throws IOException { @@ -106,6 +112,7 @@ public EsqlExecutionInfo(StreamInput in) throws IOException { this.includeCCSMetadata = false; } this.skipUnavailablePredicate = Predicates.always(); + this.relativeStartNanos = null; } @Override @@ -125,7 +132,35 @@ public boolean includeCCSMetadata() { return includeCCSMetadata; } - public void overallTook(TimeValue took) { + public Long getRelativeStartNanos() { + return relativeStartNanos; + } + + /** + * Call when ES|QL "planning" phase is complete and query execution (in ComputeService) is about to start. + * Note this is currently only built for a single phase planning/execution model. When INLINESTATS + * moves towards GA we may need to revisit this model. Currently, it should never be called more than once. + */ + public void markEndPlanning() { + assert planningTookTime == null : "markEndPlanning should only be called once"; + assert relativeStartNanos != null : "Relative start time must be set when markEndPlanning is called"; + planningTookTime = new TimeValue(System.nanoTime() - relativeStartNanos, TimeUnit.NANOSECONDS); + } + + public TimeValue planningTookTime() { + return planningTookTime; + } + + /** + * Call when ES|QL execution is complete in order to set the overall took time for an ES|QL query. 
+ */ + public void markEndQuery() { + assert relativeStartNanos != null : "Relative start time must be set when markEndQuery is called"; + overallTook = new TimeValue(System.nanoTime() - relativeStartNanos, TimeUnit.NANOSECONDS); + } + + // for testing only - use markEndQuery in production code + void overallTook(TimeValue took) { this.overallTook = took; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java index d8fc4da070767..49af4a593e6e5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java @@ -47,7 +47,6 @@ final class ComputeListener implements Releasable { private final List collectedProfiles; private final ResponseHeadersCollector responseHeaders; private final EsqlExecutionInfo esqlExecutionInfo; - private final long queryStartTimeNanos; // clusterAlias indicating where this ComputeListener is running // used by the top level ComputeListener in ComputeService on both local and remote clusters private final String whereRunning; @@ -61,7 +60,7 @@ public static ComputeListener create( CancellableTask task, ActionListener delegate ) { - return new ComputeListener(transportService, task, null, null, -1, delegate); + return new ComputeListener(transportService, task, null, null, delegate); } /** @@ -75,7 +74,6 @@ public static ComputeListener create( * @param transportService * @param task * @param executionInfo {@link EsqlExecutionInfo} to capture execution metadata - * @param queryStartTimeNanos Start time of the ES|QL query (stored in {@link org.elasticsearch.xpack.esql.session.Configuration}) * @param delegate */ public static ComputeListener create( @@ -83,10 +81,9 @@ public static ComputeListener create( TransportService transportService, CancellableTask task, 
EsqlExecutionInfo executionInfo, - long queryStartTimeNanos, ActionListener delegate ) { - return new ComputeListener(transportService, task, clusterAlias, executionInfo, queryStartTimeNanos, delegate); + return new ComputeListener(transportService, task, clusterAlias, executionInfo, delegate); } private ComputeListener( @@ -94,7 +91,6 @@ private ComputeListener( CancellableTask task, String clusterAlias, EsqlExecutionInfo executionInfo, - long queryStartTimeNanos, ActionListener delegate ) { this.transportService = transportService; @@ -102,7 +98,6 @@ private ComputeListener( this.responseHeaders = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); this.collectedProfiles = Collections.synchronizedList(new ArrayList<>()); this.esqlExecutionInfo = executionInfo; - this.queryStartTimeNanos = queryStartTimeNanos; this.whereRunning = clusterAlias; // for the DataNodeHandler ComputeListener, clusterAlias and executionInfo will be null // for the top level ComputeListener in ComputeService both will be non-null @@ -129,11 +124,15 @@ private ComputeListener( } else { result = new ComputeResponse(collectedProfiles.isEmpty() ? 
List.of() : collectedProfiles.stream().toList()); if (coordinatingClusterIsSearchedInCCS()) { - // mark local cluster as finished once the coordinator and all data nodes have finished processing - executionInfo.swapCluster( - RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build() - ); + // if not already marked as SKIPPED, mark the local cluster as finished once the coordinator and all + // data nodes have finished processing + executionInfo.swapCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, (k, v) -> { + if (v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) { + return new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build(); + } else { + return v; + } + }); } } delegate.onResponse(result); @@ -196,8 +195,8 @@ ActionListener acquireAvoid() { * info to be gathered (namely, the DataNodeRequestHandler ComputeListener) should pass in null. 
*/ ActionListener acquireCompute(@Nullable String computeClusterAlias) { - assert computeClusterAlias == null || (esqlExecutionInfo != null && queryStartTimeNanos > 0) - : "When clusterAlias is provided to acquireCompute, executionInfo must be non-null and queryStartTimeNanos must be positive"; + assert computeClusterAlias == null || (esqlExecutionInfo != null && esqlExecutionInfo.getRelativeStartNanos() != null) + : "When clusterAlias is provided to acquireCompute, executionInfo and relativeStartTimeNanos must be non-null"; return acquireAvoid().map(resp -> { responseHeaders.collect(); @@ -209,24 +208,17 @@ ActionListener acquireCompute(@Nullable String computeClusterAl return null; } if (isCCSListener(computeClusterAlias)) { - // this is the callback for the listener to the CCS compute - esqlExecutionInfo.swapCluster( - computeClusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) - // for now ESQL doesn't return partial results, so set status to SUCCESSFUL - .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) - .setTook(resp.getTook()) - .setTotalShards(resp.getTotalShards()) - .setSuccessfulShards(resp.getSuccessfulShards()) - .setSkippedShards(resp.getSkippedShards()) - .setFailedShards(resp.getFailedShards()) - .build() - ); + // this is the callback for the listener on the primary coordinator that receives a remote ComputeResponse + updateExecutionInfoWithRemoteResponse(computeClusterAlias, resp); + } else if (shouldRecordTookTime()) { + Long relativeStartNanos = esqlExecutionInfo.getRelativeStartNanos(); // handler for this cluster's data node and coordinator completion (runs on "local" and remote clusters) - TimeValue tookTime = new TimeValue(System.nanoTime() - queryStartTimeNanos, TimeUnit.NANOSECONDS); + assert relativeStartNanos != null : "queryStartTimeNanos not set properly"; + TimeValue tookTime = new TimeValue(System.nanoTime() - relativeStartNanos, TimeUnit.NANOSECONDS); esqlExecutionInfo.swapCluster(computeClusterAlias, (k, v) -> 
{ - if (v.getTook() == null || v.getTook().nanos() < tookTime.nanos()) { + if (v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED + && (v.getTook() == null || v.getTook().nanos() < tookTime.nanos())) { return new EsqlExecutionInfo.Cluster.Builder(v).setTook(tookTime).build(); } else { return v; @@ -237,6 +229,40 @@ ActionListener acquireCompute(@Nullable String computeClusterAl }); } + private void updateExecutionInfoWithRemoteResponse(String computeClusterAlias, ComputeResponse resp) { + TimeValue tookOnCluster; + if (resp.getTook() != null) { + TimeValue remoteExecutionTime = resp.getTook(); + TimeValue planningTookTime = esqlExecutionInfo.planningTookTime(); + tookOnCluster = new TimeValue(planningTookTime.nanos() + remoteExecutionTime.nanos(), TimeUnit.NANOSECONDS); + esqlExecutionInfo.swapCluster( + computeClusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) + // for now ESQL doesn't return partial results, so set status to SUCCESSFUL + .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) + .setTook(tookOnCluster) + .setTotalShards(resp.getTotalShards()) + .setSuccessfulShards(resp.getSuccessfulShards()) + .setSkippedShards(resp.getSkippedShards()) + .setFailedShards(resp.getFailedShards()) + .build() + ); + } else { + // if the cluster is an older version and does not send back took time, then calculate it here on the coordinator + // and leave shard info unset, so it is not shown in the CCS metadata section of the JSON response + long remoteTook = System.nanoTime() - esqlExecutionInfo.getRelativeStartNanos(); + tookOnCluster = new TimeValue(remoteTook, TimeUnit.NANOSECONDS); + esqlExecutionInfo.swapCluster( + computeClusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) + // for now ESQL doesn't return partial results, so set status to SUCCESSFUL + .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) + .setTook(tookOnCluster) + .build() + ); + } + } + /** * Use this method when no execution metadata needs to be added to 
{@link EsqlExecutionInfo} */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index f714695504a1d..108e70d7d3a50 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -32,7 +32,6 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilder; @@ -81,7 +80,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -173,19 +171,10 @@ public void execute( null ); String local = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - try ( - var computeListener = ComputeListener.create( - local, - transportService, - rootTask, - execInfo, - configuration.getQueryStartTimeNanos(), - listener.map(r -> { - updateExecutionInfoAfterCoordinatorOnlyQuery(configuration.getQueryStartTimeNanos(), execInfo); - return new Result(physicalPlan.output(), collectedPages, r.getProfiles(), execInfo); - }) - ) - ) { + try (var computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, listener.map(r -> { + updateExecutionInfoAfterCoordinatorOnlyQuery(execInfo); + return new Result(physicalPlan.output(), collectedPages, r.getProfiles(), execInfo); + }))) { runCompute(rootTask, computeContext, coordinatorPlan, computeListener.acquireCompute(local)); return; } @@ -205,7 +194,6 @@ public void execute( queryPragmas.exchangeBufferSize(), 
transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) ); - long start = configuration.getQueryStartTimeNanos(); String local = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; /* * Grab the output attributes here, so we can pass them to @@ -216,9 +204,8 @@ public void execute( try ( Releasable ignored = exchangeSource.addEmptySink(); // this is the top level ComputeListener called once at the end (e.g., once all clusters have finished for a CCS) - var computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, start, listener.map(r -> { - long tookTimeNanos = System.nanoTime() - configuration.getQueryStartTimeNanos(); - execInfo.overallTook(new TimeValue(tookTimeNanos, TimeUnit.NANOSECONDS)); + var computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, listener.map(r -> { + execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements return new Result(outputAttributes, collectedPages, r.getProfiles(), execInfo); })) ) { @@ -258,22 +245,24 @@ public void execute( } } - private static void updateExecutionInfoAfterCoordinatorOnlyQuery(long queryStartNanos, EsqlExecutionInfo execInfo) { - long tookTimeNanos = System.nanoTime() - queryStartNanos; - execInfo.overallTook(new TimeValue(tookTimeNanos, TimeUnit.NANOSECONDS)); + // For queries like: FROM logs* | LIMIT 0 (including cross-cluster LIMIT 0 queries) + private static void updateExecutionInfoAfterCoordinatorOnlyQuery(EsqlExecutionInfo execInfo) { + execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements if (execInfo.isCrossClusterSearch()) { + assert execInfo.planningTookTime() != null : "Planning took time should be set on EsqlExecutionInfo but is null"; for (String clusterAlias : execInfo.clusterAliases()) { - // The local cluster 'took' time gets updated as part of the acquireCompute(local) call in the coordinator, so - // here we only need to update status 
for remote clusters since there are no remote ComputeListeners in this case. - // This happens in cross cluster searches that use LIMIT 0, e.g, FROM logs*,remote*:logs* | LIMIT 0. - if (clusterAlias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false) { - execInfo.swapCluster(clusterAlias, (k, v) -> { - if (v.getStatus() == EsqlExecutionInfo.Cluster.Status.RUNNING) { - return new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build(); - } else { - return v; - } - }); + // took time and shard counts for SKIPPED clusters were added at end of planning, so only update other cases here + if (execInfo.getCluster(clusterAlias).getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) { + execInfo.swapCluster( + clusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTook(execInfo.overallTook()) + .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) + .setTotalShards(0) + .setSuccessfulShards(0) + .setSkippedShards(0) + .setFailedShards(0) + .build() + ); } } } @@ -837,8 +826,7 @@ public void messageReceived(ClusterComputeRequest request, TransportChannel chan EsqlExecutionInfo execInfo = new EsqlExecutionInfo(true); execInfo.swapCluster(clusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(clusterAlias, Arrays.toString(request.indices()))); CancellableTask cancellable = (CancellableTask) task; - long start = request.configuration().getQueryStartTimeNanos(); - try (var computeListener = ComputeListener.create(clusterAlias, transportService, cancellable, execInfo, start, listener)) { + try (var computeListener = ComputeListener.create(clusterAlias, transportService, cancellable, execInfo, listener)) { runComputeOnRemoteCluster( clusterAlias, request.sessionId(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 96391c841856f..788b2827d7c8e 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -72,7 +72,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Predicate; @@ -164,6 +163,7 @@ public void executeOptimizedPlan( LogicalPlan firstPhase = Phased.extractFirstPhase(optimizedPlan); if (firstPhase == null) { runPhase.accept(logicalPlanToPhysicalPlan(optimizedPlan, request), listener); + updateExecutionInfoAtEndOfPlanning(executionInfo); } else { executePhased(new ArrayList<>(), optimizedPlan, request, executionInfo, firstPhase, runPhase, listener); } @@ -246,7 +246,6 @@ private void preAnalyze( if (indexResolution.isValid()) { updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.getUnavailableClusters()); - updateTookTimeForRemoteClusters(executionInfo); Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( indexResolution.get().concreteIndices().toArray(String[]::new) ).keySet(); @@ -267,68 +266,6 @@ private void preAnalyze( })); } - // visible for testing - static void updateExecutionInfoWithUnavailableClusters(EsqlExecutionInfo executionInfo, Set unavailableClusters) { - for (String clusterAlias : unavailableClusters) { - executionInfo.swapCluster( - clusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED).build() - ); - // TODO: follow-on PR will set SKIPPED status when skip_unavailable=true and throw an exception when skip_un=false - } - } - - // visible for testing - static void updateExecutionInfoWithClustersWithNoMatchingIndices(EsqlExecutionInfo executionInfo, IndexResolution indexResolution) { - Set clustersWithResolvedIndices = 
new HashSet<>(); - // determine missing clusters - for (String indexName : indexResolution.get().indexNameWithModes().keySet()) { - clustersWithResolvedIndices.add(RemoteClusterAware.parseClusterAlias(indexName)); - } - Set clustersRequested = executionInfo.clusterAliases(); - Set clustersWithNoMatchingIndices = Sets.difference(clustersRequested, clustersWithResolvedIndices); - clustersWithNoMatchingIndices.removeAll(indexResolution.getUnavailableClusters()); - /* - * These are clusters in the original request that are not present in the field-caps response. They were - * specified with an index or indices that do not exist, so the search on that cluster is done. - * Mark it as SKIPPED with 0 shards searched and took=0. - */ - for (String c : clustersWithNoMatchingIndices) { - executionInfo.swapCluster( - c, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED) - .setTook(new TimeValue(0)) - .setTotalShards(0) - .setSuccessfulShards(0) - .setSkippedShards(0) - .setFailedShards(0) - .build() - ); - } - } - - private void updateTookTimeForRemoteClusters(EsqlExecutionInfo executionInfo) { - if (executionInfo.isCrossClusterSearch()) { - for (String clusterAlias : executionInfo.clusterAliases()) { - if (clusterAlias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false) { - executionInfo.swapCluster(clusterAlias, (k, v) -> { - if (v.getTook() == null && v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) { - // set took time in case we are finished with the remote cluster (e.g., FROM foo | LIMIT 0). 
- // this will be overwritten later if ES|QL operations happen on the remote cluster (the typical scenario) - TimeValue took = new TimeValue( - System.nanoTime() - configuration.getQueryStartTimeNanos(), - TimeUnit.NANOSECONDS - ); - return new EsqlExecutionInfo.Cluster.Builder(v).setTook(took).build(); - } else { - return v; - } - }); - } - } - } - } - private void preAnalyzeIndices( LogicalPlan parsed, EsqlExecutionInfo executionInfo, @@ -508,4 +445,66 @@ public PhysicalPlan optimizedPhysicalPlan(LogicalPlan optimizedPlan) { LOGGER.debug("Optimized physical plan:\n{}", plan); return plan; } + + // visible for testing + static void updateExecutionInfoWithUnavailableClusters(EsqlExecutionInfo executionInfo, Set unavailableClusters) { + for (String clusterAlias : unavailableClusters) { + executionInfo.swapCluster( + clusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED).build() + ); + // TODO: follow-on PR will set SKIPPED status when skip_unavailable=true and throw an exception when skip_un=false + } + } + + // visible for testing + static void updateExecutionInfoWithClustersWithNoMatchingIndices(EsqlExecutionInfo executionInfo, IndexResolution indexResolution) { + Set clustersWithResolvedIndices = new HashSet<>(); + // determine missing clusters + for (String indexName : indexResolution.get().indexNameWithModes().keySet()) { + clustersWithResolvedIndices.add(RemoteClusterAware.parseClusterAlias(indexName)); + } + Set clustersRequested = executionInfo.clusterAliases(); + Set clustersWithNoMatchingIndices = Sets.difference(clustersRequested, clustersWithResolvedIndices); + clustersWithNoMatchingIndices.removeAll(indexResolution.getUnavailableClusters()); + /* + * These are clusters in the original request that are not present in the field-caps response. They were + * specified with an index or indices that do not exist, so the search on that cluster is done. 
+ * Mark it as SKIPPED with 0 shards searched and took=0. + */ + for (String c : clustersWithNoMatchingIndices) { + executionInfo.swapCluster( + c, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED) + .setTook(new TimeValue(0)) + .setTotalShards(0) + .setSuccessfulShards(0) + .setSkippedShards(0) + .setFailedShards(0) + .build() + ); + } + } + + // visible for testing + static void updateExecutionInfoAtEndOfPlanning(EsqlExecutionInfo execInfo) { + // TODO: this logic assumes a single phase execution model, so it may need to altered once INLINESTATS is made CCS compatible + if (execInfo.isCrossClusterSearch()) { + execInfo.markEndPlanning(); + for (String clusterAlias : execInfo.clusterAliases()) { + EsqlExecutionInfo.Cluster cluster = execInfo.getCluster(clusterAlias); + if (cluster.getStatus() == EsqlExecutionInfo.Cluster.Status.SKIPPED) { + execInfo.swapCluster( + clusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTook(execInfo.planningTookTime()) + .setTotalShards(0) + .setSuccessfulShards(0) + .setSkippedShards(0) + .setFailedShards(0) + .build() + ); + } + } + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java index 8cfcb605a19d5..5fbd5dd28050f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -132,7 +132,6 @@ public void testEmpty() { transportService, newTask(), executionInfo, - System.nanoTime(), results ) ) { @@ -152,7 +151,6 @@ public void testCollectComputeResults() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -196,6 +194,7 @@ public void testAcquireComputeCCSListener() { String remoteAlias = "rc1"; EsqlExecutionInfo 
executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster(remoteAlias, (k, v) -> new EsqlExecutionInfo.Cluster(remoteAlias, "logs*", false)); + executionInfo.markEndPlanning(); // set planning took time, so it can be used to calculate per-cluster took time try ( ComputeListener computeListener = ComputeListener.create( // 'whereRunning' for this test is the local cluster, waiting for a response from the remote cluster @@ -203,7 +202,6 @@ public void testAcquireComputeCCSListener() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -239,6 +237,60 @@ public void testAcquireComputeCCSListener() { Mockito.verifyNoInteractions(transportService.getTaskManager()); } + /** + * Tests the acquireCompute functionality running on the querying ("local") cluster, that is waiting upon + * a ComputeResponse from a remote cluster where we simulate connecting to a remote cluster running a version + * of ESQL that does not record and return CCS metadata. Ensure that the local cluster {@link EsqlExecutionInfo} + * is properly updated with took time and shard info is left unset. 
+ */ + public void testAcquireComputeCCSListenerWithComputeResponseFromOlderCluster() { + PlainActionFuture future = new PlainActionFuture<>(); + List allProfiles = new ArrayList<>(); + String remoteAlias = "rc1"; + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); + executionInfo.swapCluster(remoteAlias, (k, v) -> new EsqlExecutionInfo.Cluster(remoteAlias, "logs*", false)); + executionInfo.markEndPlanning(); // set planning took time, so it can be used to calculate per-cluster took time + try ( + ComputeListener computeListener = ComputeListener.create( + // 'whereRunning' for this test is the local cluster, waiting for a response from the remote cluster + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + transportService, + newTask(), + executionInfo, + future + ) + ) { + int tasks = randomIntBetween(1, 5); + for (int t = 0; t < tasks; t++) { + ComputeResponse resp = randomResponse(false); // older clusters will not return CCS metadata in response + allProfiles.addAll(resp.getProfiles()); + // Use remoteAlias here to indicate what remote cluster alias the listener is waiting to hear back from + ActionListener subListener = computeListener.acquireCompute(remoteAlias); + threadPool.schedule( + ActionRunnable.wrap(subListener, l -> l.onResponse(resp)), + TimeValue.timeValueNanos(between(0, 100)), + threadPool.generic() + ); + } + } + ComputeResponse response = future.actionGet(10, TimeUnit.SECONDS); + assertThat( + response.getProfiles().stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum)), + equalTo(allProfiles.stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum))) + ); + + assertTrue(executionInfo.isCrossClusterSearch()); + EsqlExecutionInfo.Cluster rc1Cluster = executionInfo.getCluster(remoteAlias); + assertThat(rc1Cluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertNull(rc1Cluster.getTotalShards()); + assertNull(rc1Cluster.getSuccessfulShards()); + assertNull(rc1Cluster.getSkippedShards()); + 
assertNull(rc1Cluster.getFailedShards()); + assertThat(rc1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + + Mockito.verifyNoInteractions(transportService.getTaskManager()); + } + /** * Run an acquireCompute cycle on the RemoteCluster. * AcquireCompute will fill in the took time on the EsqlExecutionInfo (the shard info is filled in before this, @@ -271,7 +323,6 @@ public void testAcquireComputeRunningOnRemoteClusterFillsInTookTime() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -331,7 +382,6 @@ public void testAcquireComputeRunningOnQueryingClusterFillsInTookTime() { transportService, newTask(), executionInfo, - System.nanoTime(), future ) ) { @@ -379,7 +429,6 @@ public void testCancelOnFailure() throws Exception { transportService, rootTask, execInfo, - System.nanoTime(), rootListener ) ) { @@ -443,7 +492,6 @@ public void onFailure(Exception e) { transportService, newTask(), executionInfo, - System.nanoTime(), ActionListener.runAfter(rootListener, latch::countDown) ) ) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java index 7e93213fcee21..32b31cf78650b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java @@ -21,6 +21,7 @@ import java.util.Set; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class EsqlSessionTests extends ESTestCase { @@ -243,6 +244,50 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { } } + public void testUpdateExecutionInfoAtEndOfPlanning() { + String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + String remote1Alias = "remote1"; + String remote2Alias = "remote2"; + 
EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); + executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); + executionInfo.swapCluster( + remote1Alias, + (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) + ); + executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); + + assertNull(executionInfo.planningTookTime()); + assertNull(executionInfo.overallTook()); + try { + Thread.sleep(1); + } catch (InterruptedException e) {} + + EsqlSession.updateExecutionInfoAtEndOfPlanning(executionInfo); + + assertThat(executionInfo.planningTookTime().millis(), greaterThanOrEqualTo(0L)); + assertNull(executionInfo.overallTook()); + + // only remote1 should be altered, since it is the only one marked as SKIPPED when passed into updateExecutionInfoAtEndOfPlanning + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + assertNull(localCluster.getTotalShards()); + assertNull(localCluster.getTook()); + + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); + assertThat(remote1Cluster.getTotalShards(), equalTo(0)); + assertThat(remote1Cluster.getSuccessfulShards(), equalTo(0)); + assertThat(remote1Cluster.getSkippedShards(), equalTo(0)); + assertThat(remote1Cluster.getFailedShards(), equalTo(0)); + assertThat(remote1Cluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remote1Cluster.getTook().millis(), equalTo(executionInfo.planningTookTime().millis())); + + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + assertThat(remote2Cluster.getStatus(), 
equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + assertNull(remote2Cluster.getTotalShards()); + assertNull(remote2Cluster.getTook()); + } + private void assertClusterStatusAndHasNullCounts(EsqlExecutionInfo.Cluster cluster, EsqlExecutionInfo.Cluster.Status status) { assertThat(cluster.getStatus(), equalTo(status)); assertNull(cluster.getTook()); From be22d2a4ea7596cc418f7ad33eb0ee01e746a3a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Fri, 18 Oct 2024 18:52:04 +0200 Subject: [PATCH 07/67] [Test] Fix SearchRequestCacheDisablingInterceptorTests (#114828) The https://github.com/elastic/elasticsearch/pull/113501 PR introduced a change where `:index-name` is no longer considered a valid remote index name. The valid remote index name has to have a non-empty remote cluster name, e.g. `my-remote-cluster:index-name`. This PR changes tests to avoid randomly generating empty remote cluster names. Resolves https://github.com/elastic/elasticsearch/issues/113659, https://github.com/elastic/elasticsearch/issues/113660 --- muted-tests.yml | 6 ------ .../SearchRequestCacheDisablingInterceptorTests.java | 4 ++-- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 821a96217d05c..1c0b45bc1527c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -206,12 +206,6 @@ tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/3rd_party_deployment/Test start and stop multiple deployments} issue: https://github.com/elastic/elasticsearch/issues/101458 -- class: org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptorTests - method: testHasRemoteIndices - issue: https://github.com/elastic/elasticsearch/issues/113660 -- class: org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptorTests - method: testRequestCacheWillBeDisabledWhenSearchRemoteIndices - issue: https://github.com/elastic/elasticsearch/issues/113659 - class: 
org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {categorize.Categorize ASYNC} issue: https://github.com/elastic/elasticsearch/issues/113721 diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java index 2c4a03b7df501..b09527061f0d5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java @@ -91,7 +91,7 @@ public void testRequestCacheWillBeDisabledWhenSearchRemoteIndices() { 0, 3, String[]::new, - () -> randomAlphaOfLengthBetween(0, 5) + ":" + randomAlphaOfLengthBetween(3, 8) + () -> randomAlphaOfLengthBetween(1, 5) + ":" + randomAlphaOfLengthBetween(3, 8) ); final ArrayList allIndices = Arrays.stream(ArrayUtils.concat(localIndices, remoteIndices)) .collect(Collectors.toCollection(ArrayList::new)); @@ -121,7 +121,7 @@ public void testHasRemoteIndices() { 0, 3, String[]::new, - () -> randomAlphaOfLengthBetween(0, 5) + ":" + randomAlphaOfLengthBetween(3, 8) + () -> randomAlphaOfLengthBetween(1, 5) + ":" + randomAlphaOfLengthBetween(3, 8) ); final ArrayList allIndices = Arrays.stream(ArrayUtils.concat(localIndices, remoteIndices)) .collect(Collectors.toCollection(ArrayList::new)); From f80723e710049a6558d753c0d121e5495bf091ec Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 19 Oct 2024 04:18:58 +1100 Subject: [PATCH 08/67] Mute org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT test {string.ValuesGrouped} #115126 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml 
b/muted-tests.yml index 1c0b45bc1527c..6f2f08e22a805 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -356,6 +356,9 @@ tests: - class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests method: testBlockLoaderFromRowStrideReaderWithSyntheticSource issue: https://github.com/elastic/elasticsearch/issues/115076 +- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT + method: test {string.ValuesGrouped} + issue: https://github.com/elastic/elasticsearch/issues/115126 # Examples: # From 8e091579cd01f852fc45d85d41ff6d2b8e2b763b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 19 Oct 2024 04:37:03 +1100 Subject: [PATCH 09/67] Mute org.elasticsearch.xpack.esql.action.CrossClustersQueryIT testCCSExecutionOnSearchesWithLimit0 #115129 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 6f2f08e22a805..1e46b95bff1ef 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -359,6 +359,9 @@ tests: - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT method: test {string.ValuesGrouped} issue: https://github.com/elastic/elasticsearch/issues/115126 +- class: org.elasticsearch.xpack.esql.action.CrossClustersQueryIT + method: testCCSExecutionOnSearchesWithLimit0 + issue: https://github.com/elastic/elasticsearch/issues/115129 # Examples: # From a5118c2d9fa5e2442c408089387e75337b2a5d4a Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 18 Oct 2024 13:46:32 -0400 Subject: [PATCH 10/67] Add timeout and cancellation check to rescore phase (#115048) This adds cancellation checks to rescore phase. This cancellation checks for the parent task being cancelled and for timeout checks. The assumption is that rescore is always significantly more expensive than a regular query, so we check for timeout as frequently as the most frequent check in ExitableDirectoryReader. For LTR, we check on hit inference. 
Maybe we should also check for per feature extraction? For QueryRescorer, we check in the combine method. closes: https://github.com/elastic/elasticsearch/issues/114955 --- docs/changelog/115048.yaml | 5 + .../search/functionscore/QueryRescorerIT.java | 124 +++++++++++++++++ .../search/internal/ContextIndexSearcher.java | 2 +- .../search/rescore/QueryRescorer.java | 6 + .../search/rescore/RescoreContext.java | 11 ++ .../search/rescore/RescorePhase.java | 37 +++++ .../search/rescore/RescorePhaseTests.java | 127 ++++++++++++++++++ .../inference/ltr/LearningToRankRescorer.java | 9 ++ 8 files changed, 320 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/115048.yaml create mode 100644 server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java diff --git a/docs/changelog/115048.yaml b/docs/changelog/115048.yaml new file mode 100644 index 0000000000000..10844b83c6d01 --- /dev/null +++ b/docs/changelog/115048.yaml @@ -0,0 +1,5 @@ +pr: 115048 +summary: Add timeout and cancellation check to rescore phase +area: Ranking +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 025d224923dc0..6043688b7670a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -9,19 +9,30 @@ package org.elasticsearch.search.functionscore; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.tests.util.English; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.search.SearchType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.LeafScoreFunction; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.collapse.CollapseBuilder; @@ -29,11 +40,14 @@ import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; import java.util.List; @@ -979,9 +993,119 @@ public void testRescoreAfterCollapseRandom() throws Exception { }); } + public void testRescoreWithTimeout() throws Exception { + // no dummy docs since merges can change scores while we run queries. 
+ int numDocs = indexRandomNumbers("whitespace", -1, false); + + String intToEnglish = English.intToEnglish(between(0, numDocs - 1)); + String query = intToEnglish.split(" ")[0]; + assertResponse( + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) + .setSize(10) + .addRescorer(new QueryRescorerBuilder(functionScoreQuery(new TestTimedScoreFunctionBuilder())).windowSize(100)) + .setTimeout(TimeValue.timeValueMillis(10)), + r -> assertTrue(r.isTimedOut()) + ); + } + + @Override + protected Collection> nodePlugins() { + return List.of(TestTimedQueryPlugin.class); + } + private QueryBuilder fieldValueScoreQuery(String scoreField) { return functionScoreQuery(termQuery("shouldFilter", false), ScoreFunctionBuilders.fieldValueFactorFunction(scoreField)).boostMode( CombineFunction.REPLACE ); } + + public static class TestTimedQueryPlugin extends Plugin implements SearchPlugin { + @Override + public List> getScoreFunctions() { + return List.of( + new ScoreFunctionSpec<>( + new ParseField("timed"), + TestTimedScoreFunctionBuilder::new, + p -> new TestTimedScoreFunctionBuilder() + ) + ); + } + } + + static class TestTimedScoreFunctionBuilder extends ScoreFunctionBuilder { + private final long time = 500; + + TestTimedScoreFunctionBuilder() {} + + TestTimedScoreFunctionBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWriteTo(StreamOutput out) {} + + @Override + public String getName() { + return "timed"; + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) {} + + @Override + protected boolean doEquals(TestTimedScoreFunctionBuilder functionBuilder) { + return false; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + protected ScoreFunction doToFunction(SearchExecutionContext context) throws IOException { + return new ScoreFunction(REPLACE) { + @Override + public LeafScoreFunction 
getLeafScoreFunction(LeafReaderContext ctx) throws IOException { + return new LeafScoreFunction() { + @Override + public double score(int docId, float subQueryScore) { + try { + Thread.sleep(time); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + return time; + } + + @Override + public Explanation explainScore(int docId, Explanation subQueryScore) { + return null; + } + }; + } + + @Override + public boolean needsScores() { + return true; + } + + @Override + protected boolean doEquals(ScoreFunction other) { + return false; + } + + @Override + protected int doHashCode() { + return 0; + } + }; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 18de4b81cbf8c..da5d2d093fbd8 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -407,7 +407,7 @@ public void throwTimeExceededException() { } } - private static class TimeExceededException extends RuntimeException { + public static class TimeExceededException extends RuntimeException { // This exception should never be re-thrown, but we fill in the stacktrace to be able to trace where it does not get properly caught } diff --git a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java index 5cd947a1cc73b..cb9169dbeb5e5 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java @@ -26,6 +26,7 @@ public final class QueryRescorer implements Rescorer { + private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10; 
public static final Rescorer INSTANCE = new QueryRescorer(); @Override @@ -39,9 +40,14 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r final QueryRescoreContext rescore = (QueryRescoreContext) rescoreContext; org.apache.lucene.search.Rescorer rescorer = new org.apache.lucene.search.QueryRescorer(rescore.parsedQuery().query()) { + int count = 0; @Override protected float combine(float firstPassScore, boolean secondPassMatches, float secondPassScore) { + if (count % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) { + rescore.checkCancellation(); + } + count++; if (secondPassMatches) { return rescore.scoreMode.combine( firstPassScore * rescore.queryWeight(), diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java index 297b197a6d0c1..0ae6c326ddcdc 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java @@ -24,6 +24,7 @@ public class RescoreContext { private final int windowSize; private final Rescorer rescorer; private Set rescoredDocs; // doc Ids for which rescoring was applied + private Runnable isCancelled; /** * Build the context. @@ -34,6 +35,16 @@ public RescoreContext(int windowSize, Rescorer rescorer) { this.rescorer = rescorer; } + public void setCancellationChecker(Runnable isCancelled) { + this.isCancelled = isCancelled; + } + + public void checkCancellation() { + if (isCancelled != null) { + isCancelled.run(); + } + } + /** * The rescorer to actually apply. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index be961b8ef942b..1227db5d8e1db 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -14,12 +14,18 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.Maps; import org.elasticsearch.lucene.grouping.TopFieldGroups; +import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.query.QueryPhase; +import org.elasticsearch.search.query.SearchTimeoutException; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Map; /** @@ -44,11 +50,14 @@ public static void execute(SearchContext context) { topGroups = topFieldGroups; } try { + Runnable cancellationCheck = getCancellationChecks(context); for (RescoreContext ctx : context.rescore()) { + ctx.setCancellationChecker(cancellationCheck); topDocs = ctx.rescorer().rescore(topDocs, context.searcher(), ctx); // It is the responsibility of the rescorer to sort the resulted top docs, // here we only assert that this condition is met. 
assert context.sort() == null && topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore"; + ctx.setCancellationChecker(null); } if (topGroups != null) { assert context.collapse() != null; @@ -63,6 +72,11 @@ public static void execute(SearchContext context) { .topDocs(new TopDocsAndMaxScore(topDocs, topDocs.scoreDocs[0].score), context.queryResult().sortValueFormats()); } catch (IOException e) { throw new ElasticsearchException("Rescore Phase Failed", e); + } catch (ContextIndexSearcher.TimeExceededException e) { + if (context.request().allowPartialSearchResults() == false) { + throw new SearchTimeoutException(context.shardTarget(), "Time exceeded"); + } + context.queryResult().searchTimedOut(true); } } @@ -106,4 +120,27 @@ private static boolean topDocsSortedByScore(TopDocs topDocs) { } return true; } + + static Runnable getCancellationChecks(SearchContext context) { + List cancellationChecks = new ArrayList<>(); + if (context.lowLevelCancellation()) { + cancellationChecks.add(() -> { + final SearchShardTask task = context.getTask(); + if (task != null) { + task.ensureNotCancelled(); + } + }); + } + + final Runnable timeoutRunnable = QueryPhase.getTimeoutCheck(context); + if (timeoutRunnable != null) { + cancellationChecks.add(timeoutRunnable); + } + + return () -> { + for (var check : cancellationChecks) { + check.run(); + } + }; + } } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java b/server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java new file mode 100644 index 0000000000000..5a1c4b789b460 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/rescore/RescorePhaseTests.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.search.rescore; + +import org.apache.lucene.document.Document; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.action.search.SearchShardTask; +import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardTestCase; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.FilteredSearchContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.tasks.TaskCancelHelper; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.TestSearchContext; + +import java.io.IOException; +import java.util.Collections; + +public class RescorePhaseTests extends IndexShardTestCase { + + public void testRescorePhaseCancellation() throws IOException { + IndexWriterConfig iwc = 
newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE); + try (Directory dir = newDirectory()) { + try (RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc)) { + final int numDocs = scaledRandomIntBetween(100, 200); + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + w.addDocument(doc); + } + } + try (IndexReader reader = DirectoryReader.open(dir)) { + ContextIndexSearcher s = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + new QueryCachingPolicy() { + @Override + public void onUse(Query query) {} + + @Override + public boolean shouldCache(Query query) { + return false; + } + }, + true + ); + IndexShard shard = newShard(true); + try (TestSearchContext context = new TestSearchContext(null, shard, s)) { + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()); + context.setTask(task); + SearchContext wrapped = new FilteredSearchContext(context) { + @Override + public boolean lowLevelCancellation() { + return true; + } + + @Override + public FetchDocValuesContext docValuesContext() { + return context.docValuesContext(); + } + + @Override + public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { + return context.docValuesContext(docValuesContext); + } + + @Override + public FetchFieldsContext fetchFieldsContext() { + return context.fetchFieldsContext(); + } + + @Override + public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) { + return context.fetchFieldsContext(fetchFieldsContext); + } + }; + try (wrapped) { + Runnable cancellationChecks = RescorePhase.getCancellationChecks(wrapped); + assertNotNull(cancellationChecks); + TaskCancelHelper.cancel(task, "test cancellation"); + assertTrue(wrapped.isCancelled()); + expectThrows(TaskCancelledException.class, cancellationChecks::run); + 
QueryRescorer.QueryRescoreContext rescoreContext = new QueryRescorer.QueryRescoreContext(10); + rescoreContext.setQuery(new ParsedQuery(new MatchAllDocsQuery())); + rescoreContext.setCancellationChecker(cancellationChecks); + expectThrows( + TaskCancelledException.class, + () -> new QueryRescorer().rescore( + new TopDocs( + new TotalHits(10, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), + new ScoreDoc[] { new ScoreDoc(0, 1.0f) } + ), + context.searcher(), + rescoreContext + ) + ); + } + } + closeShards(shard); + } + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java index 70d0b980bb3bf..54a9fe908fa87 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java @@ -35,6 +35,7 @@ public class LearningToRankRescorer implements Rescorer { + private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10; public static final LearningToRankRescorer INSTANCE = new LearningToRankRescorer(); private static final Logger logger = LogManager.getLogger(LearningToRankRescorer.class); @@ -78,7 +79,12 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r List featureExtractors = ltrRescoreContext.buildFeatureExtractors(searcher); List> docFeatures = new ArrayList<>(topDocIDs.size()); int featureSize = featureExtractors.stream().mapToInt(fe -> fe.featureNames().size()).sum(); + int count = 0; while (hitUpto < hitsToRescore.length) { + if (count % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) { + rescoreContext.checkCancellation(); + } + count++; final ScoreDoc hit = hitsToRescore[hitUpto]; final int docID = hit.doc; while (docID >= endDoc) { @@ -106,6 +112,9 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, 
RescoreContext r hitUpto++; } for (int i = 0; i < hitsToRescore.length; i++) { + if (i % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) { + rescoreContext.checkCancellation(); + } Map features = docFeatures.get(i); try { InferenceResults results = definition.inferLtr(features, ltrRescoreContext.learningToRankConfig); From d9c2eba47158f3832a675c82693c3c053f209c44 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 18 Oct 2024 12:48:09 -0500 Subject: [PATCH 11/67] Avoiding possibility of duplicate index names in IndexLifecycleServiceTests.testExceptionStillProcessesOtherIndices (#115118) --- .../elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index 209839c9d24df..eceb81542377a 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -390,7 +390,10 @@ public void doTestExceptionStillProcessesOtherIndices(boolean useOnMaster) { MockAction mockAction = new MockAction(Collections.singletonList(i2mockStep)); Phase i2phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction)); LifecyclePolicy i2policy = newTestLifecyclePolicy(policy1, Collections.singletonMap(i2phase.getName(), i1phase)); - Index index2 = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); + Index index2 = new Index( + randomValueOtherThan(index1.getName(), () -> randomAlphaOfLengthBetween(1, 20)), + randomAlphaOfLengthBetween(1, 20) + ); LifecycleExecutionState.Builder i2lifecycleState = LifecycleExecutionState.builder(); i2lifecycleState.setPhase(i2currentStepKey.phase()); i2lifecycleState.setAction(i2currentStepKey.action()); From 
8a613e7f171afee63af2d5efc8ddbd5ab9f5e903 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 19 Oct 2024 05:10:33 +1100 Subject: [PATCH 12/67] Mute org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT #115135 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1e46b95bff1ef..4b69eacba7b1a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -362,6 +362,8 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClustersQueryIT method: testCCSExecutionOnSearchesWithLimit0 issue: https://github.com/elastic/elasticsearch/issues/115129 +- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT + issue: https://github.com/elastic/elasticsearch/issues/115135 # Examples: # From 16b86a61b180553889e7d3a2747a0e76e7c244b9 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 18 Oct 2024 13:37:31 -0700 Subject: [PATCH 13/67] Remove temporary mutes of compatibility tests (#115140) --- rest-api-spec/build.gradle | 4 ---- x-pack/plugin/build.gradle | 1 - 2 files changed, 5 deletions(-) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 27ae0c7f99db1..a742e83255bbb 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,8 +57,4 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({task -> task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") - task.skipTest("tsdb/20_mapping/disabled source", "temporary until backported") - task.skipTest("logsdb/20_source_mapping/disabled _source is not supported", "temporary until backported") - task.skipTest("tsdb/20_mapping/regular source", "temporary until backported") - task.skipTest("logsdb/20_source_mapping/stored _source 
mode is not supported", "temporary until backported") }) diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 3e5aaea43a9b9..8297ef5161fb0 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -82,7 +82,6 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") - task.skipTest("wildcard/30_ignore_above_synthetic_source/wildcard field type ignore_above", "Temporary until backported") task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. That's safe.") }) From 0c287384e7b1dbf368e222fc3dc10c9ca7c01a0e Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 22:40:27 +0200 Subject: [PATCH 14/67] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to bf163e1 (#114985) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index d80256ee36a17..fb52daf7e164f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -27,7 +27,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - 
WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:277ebb42c458ef39cb4028f9204f0b3d51d8cd628ea737a65696a1143c3e42fe", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:bf163e1977002301f7b9fd28fe6837a8cb2dd5c83e4cd45fb67fb28d15d5d40f", "-wolfi", "apk" ), From 16bde5189176b6d3fb218e2cd027f207d7c436f0 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 18 Oct 2024 13:48:12 -0700 Subject: [PATCH 15/67] Remove IndexMode#isSyntheticSourceEnabled (#114963) --- .../AnnotatedTextFieldMapper.java | 24 +-- .../test/logsdb/20_source_mapping.yml | 93 +++++++++ .../rest-api-spec/test/tsdb/20_mapping.yml | 30 +++ .../org/elasticsearch/index/IndexMode.java | 16 +- .../index/mapper/BinaryFieldMapper.java | 18 +- .../index/mapper/DynamicFieldsBuilder.java | 16 +- .../index/mapper/MappingParser.java | 5 +- .../index/mapper/SourceFieldMapper.java | 190 +++++++----------- .../index/mapper/TextFieldMapper.java | 25 +-- .../index/query/QueryRewriteContext.java | 3 +- .../fielddata/AbstractFieldDataTestCase.java | 10 +- .../index/fielddata/FilterFieldDataTests.java | 9 +- .../fielddata/IndexFieldDataServiceTests.java | 7 +- .../highlight/HighlightBuilderTests.java | 3 +- .../rescore/QueryRescorerBuilderTests.java | 5 +- 15 files changed, 259 insertions(+), 195 deletions(-) diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 709d6892788c4..c12849d545b33 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import 
org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.StringStoredFieldFieldLoader; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextParams; @@ -91,15 +92,10 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final TextParams.Analyzers analyzers; - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private final Parameter store; - public Builder( - String name, - IndexVersion indexCreatedVersion, - IndexAnalyzers indexAnalyzers, - boolean isSyntheticSourceEnabledViaIndexMode - ) { + public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabled) { super(name); this.indexCreatedVersion = indexCreatedVersion; this.analyzers = new TextParams.Analyzers( @@ -108,10 +104,10 @@ public Builder( m -> builder(m).analyzers.positionIncrementGap.getValue(), indexCreatedVersion ); - this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = isSyntheticSourceEnabled; this.store = Parameter.storeParam( m -> builder(m).store.getValue(), - () -> isSyntheticSourceEnabledViaIndexMode && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false + () -> isSyntheticSourceEnabled && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false ); } @@ -172,7 +168,7 @@ public AnnotatedTextFieldMapper build(MapperBuilderContext context) { } public static TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), c.getIndexSettings().getMode().isSyntheticSourceEnabled()) + (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), SourceFieldMapper.isSynthetic(c.getIndexSettings())) ); /** @@ -560,12 +556,8 @@ protected 
String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder( - leafName(), - builder.indexCreatedVersion, - builder.analyzers.indexAnalyzers, - builder.isSyntheticSourceEnabledViaIndexMode - ).init(this); + return new Builder(leafName(), builder.indexCreatedVersion, builder.analyzers.indexAnalyzers, builder.isSyntheticSourceEnabled) + .init(this); } @Override diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml index 03c8def9f558c..b4709a4e4d176 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -1,3 +1,22 @@ +--- +synthetic _source is default: + - requires: + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic + + - do: + indices.create: + index: test-default-source + body: + settings: + index: + mode: logsdb + - do: + indices.get: + index: test-default-source + + - match: { test-default-source.mappings._source.mode: "synthetic" } + --- stored _source mode is supported: - requires: @@ -57,3 +76,77 @@ disabled _source is not supported: - match: { error.type: "mapper_parsing_exception" } - match: { error.root_cause.0.type: "mapper_parsing_exception" } - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } + +--- +include/exclude is not supported with synthetic _source: + - requires: + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic + + - do: + catch: '/filtering the stored _source is incompatible with synthetic source/' + indices.create: + index: test-includes + body: + settings: + index: + mode: logsdb + mappings: + _source: + includes: [a] + + - do: 
+ catch: '/filtering the stored _source is incompatible with synthetic source/' + indices.create: + index: test-excludes + body: + settings: + index: + mode: logsdb + mappings: + _source: + excludes: [b] + +--- +include/exclude is supported with stored _source: + - requires: + cluster_features: ["mapper.source.remove_synthetic_source_only_validation"] + reason: requires new validation logic + + - do: + indices.create: + index: test-includes + body: + settings: + index: + mode: logsdb + mappings: + _source: + mode: stored + includes: [a] + + - do: + indices.get: + index: test-includes + + - match: { test-includes.mappings._source.mode: "stored" } + - match: { test-includes.mappings._source.includes: ["a"] } + + - do: + indices.create: + index: test-excludes + body: + settings: + index: + mode: logsdb + mappings: + _source: + mode: stored + excludes: [b] + + - do: + indices.get: + index: test-excludes + + - match: { test-excludes.mappings._source.mode: "stored" } + - match: { test-excludes.mappings._source.excludes: ["b"] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 6a59c7bf75cbf..c5669cd6414b1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -528,6 +528,36 @@ disabled source is not supported: - match: { error.root_cause.0.type: "mapper_parsing_exception" } - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + - do: + catch: bad_request + indices.create: + index: tsdb_index + body: + settings: + index: + mode: time_series + routing_path: [k8s.pod.uid] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + _source: + enabled: false + properties: + "@timestamp": + type: date + k8s: + 
properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + + - match: { error.type: "mapper_parsing_exception" } + - match: { error.root_cause.0.type: "mapper_parsing_exception" } + - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + --- source include/exclude: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 5908bc22e21e2..75ec67f26dd3a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -120,8 +120,8 @@ public boolean shouldValidateTimestamp() { public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) {} @Override - public boolean isSyntheticSourceEnabled() { - return false; + public SourceFieldMapper.Mode defaultSourceMode() { + return SourceFieldMapper.Mode.STORED; } }, TIME_SERIES("time_series") { @@ -223,8 +223,8 @@ public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) { } @Override - public boolean isSyntheticSourceEnabled() { - return true; + public SourceFieldMapper.Mode defaultSourceMode() { + return SourceFieldMapper.Mode.SYNTHETIC; } }, LOGSDB("logsdb") { @@ -300,8 +300,8 @@ public void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper) { } @Override - public boolean isSyntheticSourceEnabled() { - return true; + public SourceFieldMapper.Mode defaultSourceMode() { + return SourceFieldMapper.Mode.SYNTHETIC; } @Override @@ -460,9 +460,9 @@ public String getName() { public abstract void validateSourceFieldMapper(SourceFieldMapper sourceFieldMapper); /** - * @return whether synthetic source is the only allowed source mode. 
+ * @return default source mode for this mode */ - public abstract boolean isSyntheticSourceEnabled(); + public abstract SourceFieldMapper.Mode defaultSourceMode(); public String getDefaultCodec() { return CodecService.DEFAULT_CODEC; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 06bf66a4a09c6..87c123d71aae5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -49,13 +49,13 @@ public static class Builder extends FieldMapper.Builder { private final Parameter stored = Parameter.storeParam(m -> toType(m).stored, false); private final Parameter> meta = Parameter.metaParam(); - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private final Parameter hasDocValues; - public Builder(String name, boolean isSyntheticSourceEnabledViaIndexMode) { + public Builder(String name, boolean isSyntheticSourceEnabled) { super(name); - this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; - this.hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, isSyntheticSourceEnabledViaIndexMode); + this.isSyntheticSourceEnabled = isSyntheticSourceEnabled; + this.hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, isSyntheticSourceEnabled); } @Override @@ -79,9 +79,7 @@ public BinaryFieldMapper build(MapperBuilderContext context) { } } - public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.getIndexSettings().getMode().isSyntheticSourceEnabled()) - ); + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, SourceFieldMapper.isSynthetic(c.getIndexSettings()))); public static final class BinaryFieldType extends MappedFieldType { private BinaryFieldType(String name, boolean isStored, 
boolean hasDocValues, Map meta) { @@ -140,13 +138,13 @@ public Query termQuery(Object value, SearchExecutionContext context) { private final boolean stored; private final boolean hasDocValues; - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; protected BinaryFieldMapper(String simpleName, MappedFieldType mappedFieldType, BuilderParams builderParams, Builder builder) { super(simpleName, mappedFieldType, builderParams); this.stored = builder.stored.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); - this.isSyntheticSourceEnabledViaIndexMode = builder.isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = builder.isSyntheticSourceEnabled; } @Override @@ -186,7 +184,7 @@ public void indexValue(DocumentParserContext context, byte[] value) { @Override public FieldMapper.Builder getMergeBuilder() { - return new BinaryFieldMapper.Builder(leafName(), isSyntheticSourceEnabledViaIndexMode).init(this); + return new BinaryFieldMapper.Builder(leafName(), isSyntheticSourceEnabled).init(this); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java index 4b6419b85e155..0793dd748c67e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java @@ -334,13 +334,10 @@ public boolean newDynamicStringField(DocumentParserContext context, String name) ); } else { return createDynamicField( - new TextFieldMapper.Builder( - name, - context.indexAnalyzers(), - context.indexSettings().getMode().isSyntheticSourceEnabled() - ).addMultiField( - new KeywordFieldMapper.Builder("keyword", context.indexSettings().getIndexVersionCreated()).ignoreAbove(256) - ), + new TextFieldMapper.Builder(name, context.indexAnalyzers(), 
SourceFieldMapper.isSynthetic(context.indexSettings())) + .addMultiField( + new KeywordFieldMapper.Builder("keyword", context.indexSettings().getIndexVersionCreated()).ignoreAbove(256) + ), context ); } @@ -412,10 +409,7 @@ public boolean newDynamicDateField(DocumentParserContext context, String name, D } boolean newDynamicBinaryField(DocumentParserContext context, String name) throws IOException { - return createDynamicField( - new BinaryFieldMapper.Builder(name, context.indexSettings().getMode().isSyntheticSourceEnabled()), - context - ); + return createDynamicField(new BinaryFieldMapper.Builder(name, SourceFieldMapper.isSynthetic(context.indexSettings())), context); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index 9afa77161bef1..f30a0089e4eff 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -124,7 +124,10 @@ Mapping parse(@Nullable String type, MergeReason reason, Map map Map, MetadataFieldMapper> metadataMappers = metadataMappersSupplier.get(); Map meta = null; - boolean isSourceSynthetic = mappingParserContext.getIndexSettings().getMode().isSyntheticSourceEnabled(); + // TODO this should be the final value once `_source.mode` mapping parameter is not used anymore + // and it should not be reassigned below. + // For now it is still possible to set `_source.mode` so this is correct. 
+ boolean isSourceSynthetic = SourceFieldMapper.isSynthetic(mappingParserContext.getIndexSettings()); boolean isDataStream = false; Iterator> iterator = mappingSource.entrySet().iterator(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index dd09dc6ea0c5c..372e0bbdfecf4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -64,10 +64,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { public static final Setting INDEX_MAPPER_SOURCE_MODE_SETTING = Setting.enumSetting(SourceFieldMapper.Mode.class, settings -> { final IndexMode indexMode = IndexSettings.MODE.get(settings); - return switch (indexMode) { - case IndexMode.LOGSDB, IndexMode.TIME_SERIES -> Mode.SYNTHETIC.name(); - default -> Mode.STORED.name(); - }; + return indexMode.defaultSourceMode().name(); }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); /** The source mode */ @@ -81,68 +78,28 @@ public enum Mode { null, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null - ); - - private static final SourceFieldMapper DEFAULT_DISABLED = new SourceFieldMapper( - Mode.DISABLED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null - ); - - private static final SourceFieldMapper DEFAULT_SYNTHETIC = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - null - ); - - private static final SourceFieldMapper TSDB_DEFAULT = new SourceFieldMapper( - Mode.SYNTHETIC, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES + Strings.EMPTY_ARRAY ); - private static final SourceFieldMapper TSDB_DEFAULT_STORED = new SourceFieldMapper( + private static final SourceFieldMapper STORED = new 
SourceFieldMapper( Mode.STORED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES + Strings.EMPTY_ARRAY ); - private static final SourceFieldMapper LOGSDB_DEFAULT = new SourceFieldMapper( + private static final SourceFieldMapper SYNTHETIC = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB + Strings.EMPTY_ARRAY ); - private static final SourceFieldMapper LOGSDB_DEFAULT_STORED = new SourceFieldMapper( - Mode.STORED, - Explicit.IMPLICIT_TRUE, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.LOGSDB - ); - - /* - * Synthetic source was added as the default for TSDB in v.8.7. The legacy field mapper below - * is used in bwc tests and mixed clusters containing time series indexes created in an earlier version. - */ - private static final SourceFieldMapper TSDB_LEGACY_DEFAULT = new SourceFieldMapper( - null, + private static final SourceFieldMapper DISABLED = new SourceFieldMapper( + Mode.DISABLED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - IndexMode.TIME_SERIES + Strings.EMPTY_ARRAY ); public static class Defaults { @@ -221,12 +178,7 @@ protected Parameter[] getParameters() { return new Parameter[] { enabled, mode, includes, excludes }; } - private boolean isDefault(final Mode sourceMode) { - if (sourceMode != null - && (((indexMode != null && indexMode.isSyntheticSourceEnabled() && sourceMode == Mode.SYNTHETIC) == false) - || sourceMode == Mode.DISABLED)) { - return false; - } + private boolean isDefault() { return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty(); } @@ -237,15 +189,9 @@ public SourceFieldMapper build() { throw new MapperParsingException("Cannot set both [mode] and [enabled] parameters"); } } - // NOTE: if the `index.mapper.source.mode` exists it takes precedence to determine the source mode for `_source` - // otherwise the mode is determined according to 
`index.mode` and `_source.mode`. - final Mode sourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings) - ? INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings) - : mode.get(); - if (isDefault(sourceMode)) { - return resolveSourceMode(indexMode, sourceMode == null ? Mode.STORED : sourceMode); - } + final Mode sourceMode = resolveSourceMode(); + if (supportsNonDefaultParameterValues == false) { List disallowed = new ArrayList<>(); if (enabled.get().value() == false) { @@ -269,61 +215,75 @@ public SourceFieldMapper build() { } } - SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( - sourceMode, - enabled.get(), - includes.getValue().toArray(Strings.EMPTY_ARRAY), - excludes.getValue().toArray(Strings.EMPTY_ARRAY), - indexMode - ); + if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { + throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); + } + + SourceFieldMapper sourceFieldMapper; + if (isDefault()) { + // Needed for bwc so that "mode" is not serialized in case of a standard index with stored source. + if (sourceMode == null) { + sourceFieldMapper = DEFAULT; + } else { + sourceFieldMapper = resolveStaticInstance(sourceMode); + } + } else { + sourceFieldMapper = new SourceFieldMapper( + sourceMode, + enabled.get(), + includes.getValue().toArray(Strings.EMPTY_ARRAY), + excludes.getValue().toArray(Strings.EMPTY_ARRAY) + ); + } if (indexMode != null) { indexMode.validateSourceFieldMapper(sourceFieldMapper); } return sourceFieldMapper; } - } + private Mode resolveSourceMode() { + // If the `index.mapper.source.mode` exists it takes precedence to determine the source mode for `_source` + // otherwise the mode is determined according to `_source.mode`. 
+ if (INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings)) { + return INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings); + } - private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode) { - switch (indexMode) { - case STANDARD: - switch (sourceMode) { - case SYNTHETIC: - return DEFAULT_SYNTHETIC; - case STORED: - return DEFAULT; - case DISABLED: - return DEFAULT_DISABLED; - default: - throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); + // If `_source.mode` is not set we need to apply a default according to index mode. + if (mode.get() == null) { + if (indexMode == null || indexMode == IndexMode.STANDARD) { + // Special case to avoid serializing mode. + return null; } - case TIME_SERIES: - case LOGSDB: - switch (sourceMode) { - case SYNTHETIC: - return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT; - case STORED: - return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED; - case DISABLED: - throw new IllegalArgumentException("_source can not be disabled in index using [" + indexMode + "] index mode"); - default: - throw new IllegalArgumentException("Unsupported source mode: " + sourceMode); - } - default: - throw new IllegalArgumentException("Unsupported index mode: " + indexMode); + + return indexMode.defaultSourceMode(); + } + + return mode.get(); } } + private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { + return switch (sourceMode) { + case SYNTHETIC -> SYNTHETIC; + case STORED -> STORED; + case DISABLED -> DISABLED; + }; + } + public static final TypeParser PARSER = new ConfigurableTypeParser(c -> { final IndexMode indexMode = c.getIndexSettings().getMode(); - final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); - if (indexMode.isSyntheticSourceEnabled()) { - if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) { - return 
TSDB_LEGACY_DEFAULT; - } + if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) { + return DEFAULT; + } + + final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings()); + // Needed for bwc so that "mode" is not serialized in case of standard index with stored source. + if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { + return DEFAULT; } - return resolveSourceMode(indexMode, settingSourceMode == null ? Mode.STORED : settingSourceMode); + + return resolveStaticInstance(settingSourceMode); }, c -> new Builder( c.getIndexSettings().getMode(), @@ -380,21 +340,14 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; private final SourceFilter sourceFilter; - private final IndexMode indexMode; - - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, IndexMode indexMode) { + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); - assert enabled.explicit() == false || mode == null; this.mode = mode; this.enabled = enabled; this.sourceFilter = buildSourceFilter(includes, excludes); this.includes = includes; this.excludes = excludes; - if (this.sourceFilter != null && (mode == Mode.SYNTHETIC || indexMode == IndexMode.TIME_SERIES)) { - throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); - } this.complete = stored() && sourceFilter == null; - this.indexMode = indexMode; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -432,9 +385,6 @@ public void preParse(DocumentParserContext context) throws IOException { final BytesReference adaptedSource = applyFilters(originalSource, contentType); if (adaptedSource != null) { - 
assert context.indexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0) - || indexMode == null - || indexMode.isSyntheticSourceEnabled() == false; final BytesRef ref = adaptedSource.toBytesRef(); context.doc().add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length)); } @@ -468,7 +418,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY, false).init(this); + return new Builder(null, Settings.EMPTY, false).init(this); } /** @@ -485,6 +435,10 @@ public boolean isSynthetic() { return mode == Mode.SYNTHETIC; } + public static boolean isSynthetic(IndexSettings indexSettings) { + return INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexSettings.getSettings()) == SourceFieldMapper.Mode.SYNTHETIC; + } + public boolean isDisabled() { return mode == Mode.DISABLED; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 0a3911a73a2fc..642539fbbc2f8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -239,7 +239,7 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final Parameter store; - private final boolean isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private final Parameter index = Parameter.indexParam(m -> ((TextFieldMapper) m).index, true); @@ -286,16 +286,11 @@ public static class Builder extends FieldMapper.Builder { final TextParams.Analyzers analyzers; - public Builder(String name, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabledViaIndexMode) { - this(name, IndexVersion.current(), indexAnalyzers, isSyntheticSourceEnabledViaIndexMode); + public Builder(String name, IndexAnalyzers indexAnalyzers, boolean 
isSyntheticSourceEnabled) { + this(name, IndexVersion.current(), indexAnalyzers, isSyntheticSourceEnabled); } - public Builder( - String name, - IndexVersion indexCreatedVersion, - IndexAnalyzers indexAnalyzers, - boolean isSyntheticSourceEnabledViaIndexMode - ) { + public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabled) { super(name); // If synthetic source is used we need to either store this field @@ -306,7 +301,7 @@ public Builder( // If 'store' parameter was explicitly provided we'll reject the request. this.store = Parameter.storeParam( m -> ((TextFieldMapper) m).store, - () -> isSyntheticSourceEnabledViaIndexMode && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false + () -> isSyntheticSourceEnabled && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false ); this.indexCreatedVersion = indexCreatedVersion; this.analyzers = new TextParams.Analyzers( @@ -315,7 +310,7 @@ public Builder( m -> (((TextFieldMapper) m).positionIncrementGap), indexCreatedVersion ); - this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = isSyntheticSourceEnabled; } public Builder index(boolean index) { @@ -488,7 +483,7 @@ public TextFieldMapper build(MapperBuilderContext context) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), c.getIndexSettings().getMode().isSyntheticSourceEnabled()), + (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), SourceFieldMapper.isSynthetic(c.getIndexSettings())), MINIMUM_COMPATIBILITY_VERSION ); @@ -1242,7 +1237,7 @@ public Query existsQuery(SearchExecutionContext context) { private final SubFieldInfo prefixFieldInfo; private final SubFieldInfo phraseFieldInfo; - private final boolean 
isSyntheticSourceEnabledViaIndexMode; + private final boolean isSyntheticSourceEnabled; private TextFieldMapper( String simpleName, @@ -1275,7 +1270,7 @@ private TextFieldMapper( this.indexPrefixes = builder.indexPrefixes.getValue(); this.freqFilter = builder.freqFilter.getValue(); this.fieldData = builder.fieldData.get(); - this.isSyntheticSourceEnabledViaIndexMode = builder.isSyntheticSourceEnabledViaIndexMode; + this.isSyntheticSourceEnabled = builder.isSyntheticSourceEnabled; } @Override @@ -1299,7 +1294,7 @@ public Map indexAnalyzers() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName(), indexCreatedVersion, indexAnalyzers, isSyntheticSourceEnabledViaIndexMode).init(this); + return new Builder(leafName(), indexCreatedVersion, indexAnalyzers, isSyntheticSourceEnabled).init(this); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 157ed617f3eb5..8808cd79072f6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; @@ -235,7 +236,7 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, getIndexAnalyzers(), - getIndexSettings() != null && getIndexSettings().getMode().isSyntheticSourceEnabled() + getIndexSettings() != null && SourceFieldMapper.isSynthetic(getIndexSettings()) 
); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } else { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 83f668f20de7b..f809a53d753fb 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; @@ -94,7 +95,7 @@ public > IFD getForField(String type, String field fieldType = new TextFieldMapper.Builder( fieldName, createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); } } else if (type.equals("float")) { @@ -162,10 +163,9 @@ public > IFD getForField(String type, String field docValues ).build(context).fieldType(); } else if (type.equals("binary")) { - fieldType = new BinaryFieldMapper.Builder(fieldName, indexService.getIndexSettings().getMode().isSyntheticSourceEnabled()) - .docValues(docValues) - .build(context) - .fieldType(); + fieldType = new BinaryFieldMapper.Builder(fieldName, SourceFieldMapper.isSynthetic(indexService.getIndexSettings())).docValues( + docValues + ).build(context).fieldType(); } else { throw new UnsupportedOperationException(type); } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java 
b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index b0a30211c0f47..a7277b79e5c00 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import java.util.List; @@ -56,7 +57,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "high_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0).build(builderContext).fieldType(); IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft, MappedFieldType.FielddataOperation.SEARCH); for (LeafReaderContext context : contexts) { @@ -72,7 +73,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "high_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, 201, 100) .build(builderContext) @@ -91,7 +92,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "med_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 
101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) @@ -111,7 +112,7 @@ public void testFilterByFrequency() throws Exception { MappedFieldType ft = new TextFieldMapper.Builder( "med_freq", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 7616ea5119b6c..36c25b352a792 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; @@ -163,12 +164,12 @@ public void testClearField() throws Exception { final MappedFieldType mapper1 = new TextFieldMapper.Builder( "field_1", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); final MappedFieldType mapper2 = new TextFieldMapper.Builder( "field_2", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); final IndexWriter writer 
= new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); @@ -234,7 +235,7 @@ public void testFieldDataCacheListener() throws Exception { final MappedFieldType mapper1 = new TextFieldMapper.Builder( "s", createDefaultIndexAnalyzers(), - indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(indexService.getIndexSettings()) ).fielddata(true).build(context).fieldType(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 138ee899dd906..3699cdee3912b 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -323,7 +324,7 @@ public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, createDefaultIndexAnalyzers(), - idxSettings.getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(idxSettings) ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java 
b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 4a02e84bbe4f8..209dfdcc16969 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -166,7 +167,7 @@ public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, createDefaultIndexAnalyzers(), - idxSettings.getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(idxSettings) ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } @@ -233,7 +234,7 @@ public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder( name, createDefaultIndexAnalyzers(), - idxSettings.getMode().isSyntheticSourceEnabled() + SourceFieldMapper.isSynthetic(idxSettings) ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } From cc0da6d30991c4a837ca8332f29c3230a2585c69 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 18 Oct 2024 14:10:11 -0700 Subject: [PATCH 16/67] Upgrade develocity plugin (#115139) --- gradle/build.versions.toml | 2 +- gradle/verification-metadata.xml | 5 +++++ plugins/examples/settings.gradle | 2 +- settings.gradle | 2 +- 4 files changed, 8 insertions(+), 3 deletions(-) diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 35c26ef10f9ec..d11c4b7fd9c91 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -17,7 +17,7 @@ commons-codec 
= "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" forbiddenApis = "de.thetaphi:forbiddenapis:3.6" -gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.17.4" +gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.18.1" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" httpclient = "org.apache.httpcomponents:httpclient:4.5.14" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0b5c1ae6528f9..0156f13b4b05d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -799,6 +799,11 @@ + + + + + diff --git a/plugins/examples/settings.gradle b/plugins/examples/settings.gradle index 78248ecab92d2..1f168525d4b1d 100644 --- a/plugins/examples/settings.gradle +++ b/plugins/examples/settings.gradle @@ -8,7 +8,7 @@ */ plugins { - id "com.gradle.develocity" version "3.17.4" + id "com.gradle.develocity" version "3.18.1" } // Include all subdirectories as example projects diff --git a/settings.gradle b/settings.gradle index be0844de1164a..a95a46a3569d7 100644 --- a/settings.gradle +++ b/settings.gradle @@ -17,7 +17,7 @@ pluginManagement { } plugins { - id "com.gradle.develocity" version "3.17.4" + id "com.gradle.develocity" version "3.18.1" id 'elasticsearch.java-toolchain' } From 68f0f00dd181317d2071403cb959fe9019ab8587 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Fri, 18 Oct 2024 16:02:28 -0700 Subject: [PATCH 17/67] Add initial entitlement policy parsing (#114448) This change adds entitlement policy parsing with the following design: * YAML file for readability and re-use of our x-content parsers * hierarchical structure to group entitlements under a single scope * no general entitlements without a scope or for the entire project --- .../tools/entitlement-runtime/build.gradle | 6 +- .../src/main/java/module-info.java | 1 + 
.../runtime/policy/Entitlement.java | 19 ++ .../runtime/policy/ExternalEntitlement.java | 36 ++++ .../runtime/policy/FileEntitlement.java | 67 +++++++ .../entitlement/runtime/policy/Policy.java | 46 +++++ .../runtime/policy/PolicyParser.java | 176 ++++++++++++++++++ .../runtime/policy/PolicyParserException.java | 92 +++++++++ .../entitlement/runtime/policy/Scope.java | 46 +++++ .../policy/PolicyParserFailureTests.java | 83 +++++++++ .../runtime/policy/PolicyParserTests.java | 28 +++ .../runtime/policy/test-policy.yaml | 7 + 12 files changed, 602 insertions(+), 5 deletions(-) create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java create mode 100644 distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java create mode 100644 distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java create mode 100644 distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml diff --git 
a/distribution/tools/entitlement-runtime/build.gradle b/distribution/tools/entitlement-runtime/build.gradle index 0fb7bdec883f8..55471272c1b5f 100644 --- a/distribution/tools/entitlement-runtime/build.gradle +++ b/distribution/tools/entitlement-runtime/build.gradle @@ -11,16 +11,12 @@ apply plugin: 'elasticsearch.publish' dependencies { compileOnly project(':libs:elasticsearch-core') // For @SuppressForbidden + compileOnly project(":libs:elasticsearch-x-content") // for parsing policy files compileOnly project(':server') // To access the main server module for special permission checks compileOnly project(':distribution:tools:entitlement-bridge') - testImplementation project(":test:framework") } tasks.named('forbiddenApisMain').configure { replaceSignatureFiles 'jdk-signatures' } - -tasks.named('forbiddenApisMain').configure { - replaceSignatureFiles 'jdk-signatures' -} diff --git a/distribution/tools/entitlement-runtime/src/main/java/module-info.java b/distribution/tools/entitlement-runtime/src/main/java/module-info.java index d0bfc804f8024..12e6905014512 100644 --- a/distribution/tools/entitlement-runtime/src/main/java/module-info.java +++ b/distribution/tools/entitlement-runtime/src/main/java/module-info.java @@ -9,6 +9,7 @@ module org.elasticsearch.entitlement.runtime { requires org.elasticsearch.entitlement.bridge; + requires org.elasticsearch.xcontent; requires org.elasticsearch.server; exports org.elasticsearch.entitlement.runtime.api; diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java new file mode 100644 index 0000000000000..5b53c399cc1b7 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +/** + * Marker interface to ensure that only {@link Entitlement} are + * part of a {@link Policy}. All entitlement classes should implement + * this. + */ +public interface Entitlement { + +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java new file mode 100644 index 0000000000000..bb1205696b49e --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * This annotation indicates an {@link Entitlement} is available + * to "external" classes such as those used in plugins. 
Any {@link Entitlement} + * using this annotation is considered parseable as part of a policy file + * for entitlements. + */ +@Target(ElementType.CONSTRUCTOR) +@Retention(RetentionPolicy.RUNTIME) +public @interface ExternalEntitlement { + + /** + * This is the list of parameter names that are + * parseable in {@link PolicyParser#parseEntitlement(String, String)}. + * The number and order of parameter names much match the number and order + * of constructor parameters as this is how the parser will pass in the + * parsed values from a policy file. However, the names themselves do NOT + * have to match the parameter names of the constructor. + */ + String[] parameterNames() default {}; +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java new file mode 100644 index 0000000000000..8df199591d3e4 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.util.List; +import java.util.Objects; + +/** + * Describes a file entitlement with a path and actions. 
+ */ +public class FileEntitlement implements Entitlement { + + public static final int READ_ACTION = 0x1; + public static final int WRITE_ACTION = 0x2; + + private final String path; + private final int actions; + + @ExternalEntitlement(parameterNames = { "path", "actions" }) + public FileEntitlement(String path, List actionsList) { + this.path = path; + int actionsInt = 0; + + for (String actionString : actionsList) { + if ("read".equals(actionString)) { + if ((actionsInt & READ_ACTION) == READ_ACTION) { + throw new IllegalArgumentException("file action [read] specified multiple times"); + } + actionsInt |= READ_ACTION; + } else if ("write".equals(actionString)) { + if ((actionsInt & WRITE_ACTION) == WRITE_ACTION) { + throw new IllegalArgumentException("file action [write] specified multiple times"); + } + actionsInt |= WRITE_ACTION; + } else { + throw new IllegalArgumentException("unknown file action [" + actionString + "]"); + } + } + + this.actions = actionsInt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileEntitlement that = (FileEntitlement) o; + return actions == that.actions && Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hash(path, actions); + } + + @Override + public String toString() { + return "FileEntitlement{" + "path='" + path + '\'' + ", actions=" + actions + '}'; + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java new file mode 100644 index 0000000000000..e8bd7a3fff357 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Policy.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A holder for scoped entitlements. + */ +public class Policy { + + public final String name; + public final List scopes; + + public Policy(String name, List scopes) { + this.name = Objects.requireNonNull(name); + this.scopes = Collections.unmodifiableList(Objects.requireNonNull(scopes)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Policy policy = (Policy) o; + return Objects.equals(name, policy.name) && Objects.equals(scopes, policy.scopes); + } + + @Override + public int hashCode() { + return Objects.hash(name, scopes); + } + + @Override + public String toString() { + return "Policy{" + "name='" + name + '\'' + ", scopes=" + scopes + '}'; + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java new file mode 100644 index 0000000000000..229ccec3b8b2c --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.yaml.YamlXContent; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.entitlement.runtime.policy.PolicyParserException.newPolicyParserException; + +/** + * A parser to parse policy files for entitlements. 
+ */ +public class PolicyParser { + + protected static final ParseField ENTITLEMENTS_PARSEFIELD = new ParseField("entitlements"); + + protected static final String entitlementPackageName = Entitlement.class.getPackage().getName(); + + protected final XContentParser policyParser; + protected final String policyName; + + public PolicyParser(InputStream inputStream, String policyName) throws IOException { + this.policyParser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, Objects.requireNonNull(inputStream)); + this.policyName = policyName; + } + + public Policy parsePolicy() { + try { + if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException("expected object "); + } + List scopes = new ArrayList<>(); + while (policyParser.nextToken() != XContentParser.Token.END_OBJECT) { + if (policyParser.currentToken() != XContentParser.Token.FIELD_NAME) { + throw newPolicyParserException("expected object "); + } + String scopeName = policyParser.currentName(); + Scope scope = parseScope(scopeName); + scopes.add(scope); + } + return new Policy(policyName, scopes); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } + + protected Scope parseScope(String scopeName) throws IOException { + try { + if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException(scopeName, "expected object [" + ENTITLEMENTS_PARSEFIELD.getPreferredName() + "]"); + } + if (policyParser.nextToken() != XContentParser.Token.FIELD_NAME + || policyParser.currentName().equals(ENTITLEMENTS_PARSEFIELD.getPreferredName()) == false) { + throw newPolicyParserException(scopeName, "expected object [" + ENTITLEMENTS_PARSEFIELD.getPreferredName() + "]"); + } + if (policyParser.nextToken() != XContentParser.Token.START_ARRAY) { + throw newPolicyParserException(scopeName, "expected array of "); + } + List entitlements = new ArrayList<>(); + while (policyParser.nextToken() != 
XContentParser.Token.END_ARRAY) { + if (policyParser.currentToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException(scopeName, "expected object "); + } + if (policyParser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw newPolicyParserException(scopeName, "expected object "); + } + String entitlementType = policyParser.currentName(); + Entitlement entitlement = parseEntitlement(scopeName, entitlementType); + entitlements.add(entitlement); + if (policyParser.nextToken() != XContentParser.Token.END_OBJECT) { + throw newPolicyParserException(scopeName, "expected closing object"); + } + } + if (policyParser.nextToken() != XContentParser.Token.END_OBJECT) { + throw newPolicyParserException(scopeName, "expected closing object"); + } + return new Scope(scopeName, entitlements); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } + + protected Entitlement parseEntitlement(String scopeName, String entitlementType) throws IOException { + Class entitlementClass; + try { + entitlementClass = Class.forName( + entitlementPackageName + + "." 
+ + Character.toUpperCase(entitlementType.charAt(0)) + + entitlementType.substring(1) + + "Entitlement" + ); + } catch (ClassNotFoundException cnfe) { + throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); + } + if (Entitlement.class.isAssignableFrom(entitlementClass) == false) { + throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); + } + Constructor entitlementConstructor = entitlementClass.getConstructors()[0]; + ExternalEntitlement entitlementMetadata = entitlementConstructor.getAnnotation(ExternalEntitlement.class); + if (entitlementMetadata == null) { + throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); + } + + if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { + throw newPolicyParserException(scopeName, entitlementType, "expected entitlement parameters"); + } + Map parsedValues = policyParser.map(); + + Class[] parameterTypes = entitlementConstructor.getParameterTypes(); + String[] parametersNames = entitlementMetadata.parameterNames(); + Object[] parameterValues = new Object[parameterTypes.length]; + for (int parameterIndex = 0; parameterIndex < parameterTypes.length; ++parameterIndex) { + String parameterName = parametersNames[parameterIndex]; + Object parameterValue = parsedValues.remove(parameterName); + if (parameterValue == null) { + throw newPolicyParserException(scopeName, entitlementType, "missing entitlement parameter [" + parameterName + "]"); + } + Class parameterType = parameterTypes[parameterIndex]; + if (parameterType.isAssignableFrom(parameterValue.getClass()) == false) { + throw newPolicyParserException( + scopeName, + entitlementType, + "unexpected parameter type [" + parameterType.getSimpleName() + "] for entitlement parameter [" + parameterName + "]" + ); + } + parameterValues[parameterIndex] = parameterValue; + } + if (parsedValues.isEmpty() == false) { + throw 
newPolicyParserException(scopeName, entitlementType, "extraneous entitlement parameter(s) " + parsedValues); + } + + try { + return (Entitlement) entitlementConstructor.newInstance(parameterValues); + } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { + throw new IllegalStateException("internal error"); + } + } + + protected PolicyParserException newPolicyParserException(String message) { + return PolicyParserException.newPolicyParserException(policyParser.getTokenLocation(), policyName, message); + } + + protected PolicyParserException newPolicyParserException(String scopeName, String message) { + return PolicyParserException.newPolicyParserException(policyParser.getTokenLocation(), policyName, scopeName, message); + } + + protected PolicyParserException newPolicyParserException(String scopeName, String entitlementType, String message) { + return PolicyParserException.newPolicyParserException( + policyParser.getTokenLocation(), + policyName, + scopeName, + entitlementType, + message + ); + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java new file mode 100644 index 0000000000000..5dfa12f11d0be --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.xcontent.XContentLocation; + +/** + * An exception specifically for policy parsing errors. + */ +public class PolicyParserException extends RuntimeException { + + public static PolicyParserException newPolicyParserException(XContentLocation location, String policyName, String message) { + return new PolicyParserException( + "[" + location.lineNumber() + ":" + location.columnNumber() + "] policy parsing error for [" + policyName + "]: " + message + ); + } + + public static PolicyParserException newPolicyParserException( + XContentLocation location, + String policyName, + String scopeName, + String message + ) { + if (scopeName == null) { + return new PolicyParserException( + "[" + location.lineNumber() + ":" + location.columnNumber() + "] policy parsing error for [" + policyName + "]: " + message + ); + } else { + return new PolicyParserException( + "[" + + location.lineNumber() + + ":" + + location.columnNumber() + + "] policy parsing error for [" + + policyName + + "] in scope [" + + scopeName + + "]: " + + message + ); + } + } + + public static PolicyParserException newPolicyParserException( + XContentLocation location, + String policyName, + String scopeName, + String entitlementType, + String message + ) { + if (scopeName == null) { + return new PolicyParserException( + "[" + + location.lineNumber() + + ":" + + location.columnNumber() + + "] policy parsing error for [" + + policyName + + "] for entitlement type [" + + entitlementType + + "]: " + + message + ); + } else { + return new PolicyParserException( + "[" + + location.lineNumber() 
+ + ":" + + location.columnNumber() + + "] policy parsing error for [" + + policyName + + "] in scope [" + + scopeName + + "] for entitlement type [" + + entitlementType + + "]: " + + message + ); + } + } + + private PolicyParserException(String message) { + super(message); + } +} diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java new file mode 100644 index 0000000000000..0fe63eb8da1b7 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A holder for entitlements within a single scope. 
+ */ +public class Scope { + + public final String name; + public final List entitlements; + + public Scope(String name, List entitlements) { + this.name = Objects.requireNonNull(name); + this.entitlements = Collections.unmodifiableList(Objects.requireNonNull(entitlements)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Scope scope = (Scope) o; + return Objects.equals(name, scope.name) && Objects.equals(entitlements, scope.entitlements); + } + + @Override + public int hashCode() { + return Objects.hash(name, entitlements); + } + + @Override + public String toString() { + return "Scope{" + "name='" + name + '\'' + ", entitlements=" + entitlements + '}'; + } +} diff --git a/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java new file mode 100644 index 0000000000000..b21d206f3eb6a --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.test.ESTestCase; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +public class PolicyParserFailureTests extends ESTestCase { + + public void testParserSyntaxFailures() { + PolicyParserException ppe = expectThrows( + PolicyParserException.class, + () -> new PolicyParser(new ByteArrayInputStream("[]".getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml") + .parsePolicy() + ); + assertEquals("[1:1] policy parsing error for [test-failure-policy.yaml]: expected object ", ppe.getMessage()); + } + + public void testEntitlementDoesNotExist() throws IOException { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - does_not_exist: {} + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + assertEquals( + "[3:7] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name]: " + + "unknown entitlement type [does_not_exist]", + ppe.getMessage() + ); + } + + public void testEntitlementMissingParameter() throws IOException { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - file: {} + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + assertEquals( + "[3:14] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [file]: missing entitlement parameter [path]", + ppe.getMessage() + ); + + ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - file: + path: test-path + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); 
+ assertEquals( + "[5:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [file]: missing entitlement parameter [actions]", + ppe.getMessage() + ); + } + + public void testEntitlementExtraneousParameter() throws IOException { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + entitlements: + - file: + path: test-path + actions: + - read + extra: test + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + assertEquals( + "[8:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [file]: extraneous entitlement parameter(s) {extra=test}", + ppe.getMessage() + ); + } +} diff --git a/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java new file mode 100644 index 0000000000000..40016b2e3027e --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.List; + +public class PolicyParserTests extends ESTestCase { + + public void testPolicyBuilder() throws IOException { + Policy parsedPolicy = new PolicyParser(PolicyParserTests.class.getResourceAsStream("test-policy.yaml"), "test-policy.yaml") + .parsePolicy(); + Policy builtPolicy = new Policy( + "test-policy.yaml", + List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", List.of("read", "write"))))) + ); + assertEquals(parsedPolicy, builtPolicy); + } +} diff --git a/distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml b/distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml new file mode 100644 index 0000000000000..b58287cfc83b7 --- /dev/null +++ b/distribution/tools/entitlement-runtime/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml @@ -0,0 +1,7 @@ +entitlement-module-name: + entitlements: + - file: + path: "test/path/to/file" + actions: + - "read" + - "write" From ac25dbe70692df19bd424e7ef1e4bc2c16c41329 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 18 Oct 2024 20:19:30 -0400 Subject: [PATCH 18/67] Fix IPinfo geolocation schema (#115147) --- docs/changelog/115147.yaml | 5 ++ .../ingest/geoip/IpinfoIpDataLookups.java | 17 ++--- .../ingest/geoip/GeoIpProcessorTests.java | 6 +- .../geoip/IpinfoIpDataLookupsTests.java | 65 +++++++++--------- .../src/test/resources/ipinfo/asn_sample.mmdb | Bin 25210 -> 25728 bytes .../test/resources/ipinfo/ip_asn_sample.mmdb | Bin 23456 -> 24333 bytes .../resources/ipinfo/ip_country_sample.mmdb | Bin 32292 -> 30088 bytes .../ipinfo/ip_geolocation_sample.mmdb | Bin 33552 -> 0 bytes .../ip_geolocation_standard_sample.mmdb | Bin 0 -> 30105 bytes 
.../ipinfo/privacy_detection_sample.mmdb | Bin 26352 -> 26456 bytes 10 files changed, 50 insertions(+), 43 deletions(-) create mode 100644 docs/changelog/115147.yaml delete mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb create mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_standard_sample.mmdb diff --git a/docs/changelog/115147.yaml b/docs/changelog/115147.yaml new file mode 100644 index 0000000000000..36f40bba1da17 --- /dev/null +++ b/docs/changelog/115147.yaml @@ -0,0 +1,5 @@ +pr: 115147 +summary: Fix IPinfo geolocation schema +area: Ingest Node +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java index 5a13ea93ff032..8ce2424844d9d 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java @@ -218,8 +218,8 @@ public record CountryResult( public record GeolocationResult( String city, String country, - Double latitude, - Double longitude, + Double lat, + Double lng, String postalCode, String region, String timezone @@ -229,14 +229,15 @@ public record GeolocationResult( public GeolocationResult( @MaxMindDbParameter(name = "city") String city, @MaxMindDbParameter(name = "country") String country, - @MaxMindDbParameter(name = "latitude") String latitude, - @MaxMindDbParameter(name = "longitude") String longitude, - // @MaxMindDbParameter(name = "network") String network, // for now we're not exposing this + // @MaxMindDbParameter(name = "geoname_id") String geonameId, // for now we're not exposing this + @MaxMindDbParameter(name = "lat") String lat, + @MaxMindDbParameter(name = "lng") String lng, @MaxMindDbParameter(name = "postal_code") String postalCode, @MaxMindDbParameter(name = "region") String 
region, + // @MaxMindDbParameter(name = "region_code") String regionCode, // for now we're not exposing this @MaxMindDbParameter(name = "timezone") String timezone ) { - this(city, country, parseLocationDouble(latitude), parseLocationDouble(longitude), postalCode, region, timezone); + this(city, country, parseLocationDouble(lat), parseLocationDouble(lng), postalCode, region, timezone); } } @@ -395,8 +396,8 @@ protected Map transform(final Result result) } } case LOCATION -> { - Double latitude = response.latitude; - Double longitude = response.longitude; + Double latitude = response.lat; + Double longitude = response.lng; if (latitude != null && longitude != null) { Map locationObject = new HashMap<>(); locationObject.put("lat", latitude); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 640480ed277c5..4548e92239ce1 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -82,13 +82,13 @@ public void testMaxmindCity() throws Exception { } public void testIpinfoGeolocation() throws Exception { - String ip = "13.107.39.238"; + String ip = "72.20.12.220"; GeoIpProcessor processor = new GeoIpProcessor( IP_LOCATION_TYPE, // n.b. 
this is an "ip_location" processor randomAlphaOfLength(10), null, "source_field", - loader("ipinfo/ip_geolocation_sample.mmdb"), + loader("ipinfo/ip_geolocation_standard_sample.mmdb"), () -> true, "target_field", getIpinfoGeolocationLookup(), @@ -107,7 +107,7 @@ public void testIpinfoGeolocation() throws Exception { Map data = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(data, notNullValue()); assertThat(data.get("ip"), equalTo(ip)); - assertThat(data.get("city_name"), equalTo("Des Moines")); + assertThat(data.get("city_name"), equalTo("Chicago")); // see IpinfoIpDataLookupsTests for more tests of the data lookup behavior } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java index e998748efbcad..d0cdc5a3e1b5e 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java @@ -102,17 +102,17 @@ public void testParseLocationDouble() { public void testAsnFree() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "ip_asn_sample.mmdb"; - String ip = "5.182.109.0"; + String ip = "23.32.184.0"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()), Map.ofEntries( entry("ip", ip), - entry("organization_name", "M247 Europe SRL"), - entry("asn", 9009L), - entry("network", "5.182.109.0/24"), - entry("domain", "m247.com") + entry("organization_name", "Akamai Technologies, Inc."), + entry("asn", 16625L), + entry("network", "23.32.184.0/21"), + entry("domain", "akamai.com") ) ); } @@ -120,17 +120,17 @@ public void testAsnFree() { public void testAsnStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", 
Constants.WINDOWS); String databaseName = "asn_sample.mmdb"; - String ip = "23.53.116.0"; + String ip = "69.19.224.0"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()), Map.ofEntries( entry("ip", ip), - entry("organization_name", "Akamai Technologies, Inc."), - entry("asn", 32787L), - entry("network", "23.53.116.0/24"), - entry("domain", "akamai.com"), + entry("organization_name", "TPx Communications"), + entry("asn", 14265L), + entry("network", "69.19.224.0/22"), + entry("domain", "tpx.com"), entry("type", "hosting"), entry("country_iso_code", "US") ) @@ -177,25 +177,25 @@ public void testAsnInvariants() { public void testCountryFree() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "ip_country_sample.mmdb"; - String ip = "4.221.143.168"; + String ip = "20.33.76.0"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.Country(Database.CountryV2.properties()), Map.ofEntries( entry("ip", ip), - entry("country_name", "South Africa"), - entry("country_iso_code", "ZA"), - entry("continent_name", "Africa"), - entry("continent_code", "AF") + entry("country_name", "Ireland"), + entry("country_iso_code", "IE"), + entry("continent_name", "Europe"), + entry("continent_code", "EU") ) ); } public void testGeolocationStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); - String databaseName = "ip_geolocation_sample.mmdb"; - String ip = "2.124.90.182"; + String databaseName = "ip_geolocation_standard_sample.mmdb"; + String ip = "62.69.48.19"; assertExpectedLookupResults( databaseName, ip, @@ -215,36 +215,37 @@ public void testGeolocationStandard() { public void testGeolocationInvariants() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); Path configDir = tmpDir; - copyDatabase("ipinfo/ip_geolocation_sample.mmdb", 
configDir.resolve("ip_geolocation_sample.mmdb")); + copyDatabase("ipinfo/ip_geolocation_standard_sample.mmdb", configDir.resolve("ip_geolocation_standard_sample.mmdb")); { final Set expectedColumns = Set.of( - "network", "city", + "geoname_id", "region", + "region_code", "country", "postal_code", "timezone", - "latitude", - "longitude" + "lat", + "lng" ); - Path databasePath = configDir.resolve("ip_geolocation_sample.mmdb"); + Path databasePath = configDir.resolve("ip_geolocation_standard_sample.mmdb"); assertDatabaseInvariants(databasePath, (ip, row) -> { assertThat(row.keySet(), equalTo(expectedColumns)); { - String latitude = (String) row.get("latitude"); + String latitude = (String) row.get("lat"); assertThat(latitude, equalTo(latitude.trim())); Double parsed = parseLocationDouble(latitude); assertThat(parsed, notNullValue()); - assertThat(latitude, equalTo(Double.toString(parsed))); // reverse it + assertThat(Double.parseDouble(latitude), equalTo(Double.parseDouble(Double.toString(parsed)))); // reverse it } { - String longitude = (String) row.get("longitude"); + String longitude = (String) row.get("lng"); assertThat(longitude, equalTo(longitude.trim())); Double parsed = parseLocationDouble(longitude); assertThat(parsed, notNullValue()); - assertThat(longitude, equalTo(Double.toString(parsed))); // reverse it + assertThat(Double.parseDouble(longitude), equalTo(Double.parseDouble(Double.toString(parsed)))); // reverse it } }); } @@ -253,7 +254,7 @@ public void testGeolocationInvariants() { public void testPrivacyDetectionStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "privacy_detection_sample.mmdb"; - String ip = "1.53.59.33"; + String ip = "2.57.109.154"; assertExpectedLookupResults( databaseName, ip, @@ -272,16 +273,16 @@ public void testPrivacyDetectionStandard() { public void testPrivacyDetectionStandardNonEmptyService() { 
assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "privacy_detection_sample.mmdb"; - String ip = "216.131.74.65"; + String ip = "59.29.201.246"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.PrivacyDetection(Database.PrivacyDetection.properties()), Map.ofEntries( entry("ip", ip), - entry("hosting", true), + entry("hosting", false), entry("proxy", false), - entry("service", "FastVPN"), + entry("service", "VPNGate"), entry("relay", false), entry("tor", false), entry("vpn", true) @@ -391,13 +392,13 @@ public void testDatabaseTypeParsing() throws IOException { // pedantic about where precisely it should be. copyDatabase("ipinfo/ip_asn_sample.mmdb", tmpDir.resolve("ip_asn_sample.mmdb")); - copyDatabase("ipinfo/ip_geolocation_sample.mmdb", tmpDir.resolve("ip_geolocation_sample.mmdb")); + copyDatabase("ipinfo/ip_geolocation_standard_sample.mmdb", tmpDir.resolve("ip_geolocation_standard_sample.mmdb")); copyDatabase("ipinfo/asn_sample.mmdb", tmpDir.resolve("asn_sample.mmdb")); copyDatabase("ipinfo/ip_country_sample.mmdb", tmpDir.resolve("ip_country_sample.mmdb")); copyDatabase("ipinfo/privacy_detection_sample.mmdb", tmpDir.resolve("privacy_detection_sample.mmdb")); assertThat(parseDatabaseFromType("ip_asn_sample.mmdb"), is(Database.AsnV2)); - assertThat(parseDatabaseFromType("ip_geolocation_sample.mmdb"), is(Database.CityV2)); + assertThat(parseDatabaseFromType("ip_geolocation_standard_sample.mmdb"), is(Database.CityV2)); assertThat(parseDatabaseFromType("asn_sample.mmdb"), is(Database.AsnV2)); assertThat(parseDatabaseFromType("ip_country_sample.mmdb"), is(Database.CountryV2)); assertThat(parseDatabaseFromType("privacy_detection_sample.mmdb"), is(Database.PrivacyDetection)); diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb index 
916a8252a5df1d5d2ea15dfb14061e55360d6cd0..289318a124d75d770c4e26d429e5fa592589ed06 100644 GIT binary patch literal 25728 zcmbuF1$Y}r_w^OqahMx$LmPIQSQT1Olv(CjcG4tuQ;}^Yw&Y53sLagF%*>1_Gcz+Y z<9BA}NSYL0`}u!;I{Dpu=I+c6?(E7YlWCC2G^EsIGUbu^Fr)Ab=np0f$sy!Wau_+B z96^pGN0Fn+G2~cs966qxKyE=!Bqx!R$th$Jxg|N3oJLM3XOJ^VgPcXqCg+fI$$8{_ zasgRPE+n@i7m<9eXSH9Y~pmwCHs zdJ=j{^V5v!12g@eFE8ldDri-}is9&$?H$cBp^KW9z&Ezd6)3AxOZ$s?6NVg+Bg>;8@ z-JS67()_#O-^1%N<945xzX|#GGyefie~{WkeGU8sL{VMqy`MRPk{|#zyl5gRCl%{SQt5llosM16iVsK`;cRoo6x)@6h}L>VpwiNPCEu zKa|=qayU5x@gr%EB1bFAdd4VwIOfWram*i2P9V2Xl<}D3gC=SIWcX7US43_}PSxV3 zQJaqV8Ja#5zM=WEOr{YF89RrZOU~2c=2Kfh7L%E4x1zs@T#R~3ls#g9YFk5l1j&Nb zi)2OG8Oes^V~icPgSL~*jFC(EvR~ZH^B~@<>D$2HR`a)mzrE(~!0UEY9D#W}s0^uv zdO7S0+LdILqO7+X`8Cx2nqCXNj(WYOH$ZQs-lXZx)RriYM4f{I)LON;Hfl@BWr*8} z_HuHCqP)MA${u+Z^V-Q>aNR1}yDG}~Aav|g(hidmGOERKZ4Bx}e3z!j7`vKWLv|ys zhjyGyD2lp9Vx11!4e1!9B+`M5Ns(!?k6eqGb;_3ax}N$5awEAr;`X4ur=pDCi`w4g zKIFdSez=bFRK_1*GL7OGN&g_^AI#W86s3MB^usj&aK;=#9*O*;XdkUO>NfhvBJUie zleON#fUti$$qm#chGifdM4k+JU8Mz zw7pu~Hq^Eyw?o|aw0F?rc7*QJ{4!;ayB~T5<15K3vYM<>l=tgLd@c1lvYu?v;u;ax zr1{N^SpvO9+2b)rvdmVpja;fI^OhlgC+f?|734}SPV{fP=I_FoRnT{(9VA0!n2acj z`o<>_-$A_-pT=H?-(@o8i#Z*mzM5P^c9T71oJ^3tLFB>YA>^To@_r7Z zb~t$ic_euh$~&6&G32p|<6owKJb8kmEcZleCy^(Ur=Yx3kuFC%joRtt8RVJdS>)N| zIf}CUb8-E7ntwiHE+8*N{zY2;#qclD{7d0qru+$+d3gn|y^_3&yjoG#dkwC;mil#? zem(RXsNYE5q~+g?{982tR>s^$-j4h`ls(}gE$%MpcWeGVjK7z>kKBZ~`)NNwK1i~> zyoc#OLO!Y}+O@@Y)E-AUPtbmne2RP;d7sjL2KKW^Z_+~hv|m(|{q_1A87iA@IPYS$D00$vL{YM{%6=1 z27#Yz`Cq{QlGl9&`)8!Dwft|G_bvGy^1rA3gBJHAwVzC;i3!@jkiU|@DayA0PVEo! 
zPn7#tErqWMGN4`b|bas)Y&9Hl7An^c7OG1xzs zf@7IKjvP-;Ah#eVBDM!<64G*{$<(L7u0}$c@?N)u?V>#uHpW8g(_wFgG=u(3MVV(n zpGAGPrq6*sm-;+SpHFQ8SxhcO+v2nrk&DR^MOmMj+SVxFLfcB(NV}qpcOcHG`RGHT zPr}^JcQf8YddY3bZ53rXSdaPJQ{O?;cckVc%gA!F0@qd2u2P)L`&()z+a{LyDB?B z2tB0fVd@bwiu?}Ron#joBUh7a6y<$(D|-s&M?U69{+mcxxA|8i^&*{#v>VcXNJ(Cw zBGY6axt3f`}yHnc(QxV!G z^Vr|gzLwf`DF1q-`;cy+b|ZNcc{6#7qP+fA#NS5!c1^#7+MVQGh`XEiJ>B+!Jm~b<9~tw ztLFa(|98#*gJt{){V!z~n8-n7p5n9-^bsJ+7O7w`QlaJ#p*|G)Fijs0KXd&^#*EVP zM=N_8)_uWPO&8Ge!3TNTO)axXCbZ7Y`M4sJ9P)? zM1E#0T-4l(({ELj_k;2!w?X{2n!X+N?a3XGzoVA#gI}in85mPpMg>_(R*}_YjiQY8 z+JHNOddv+@g;kS%0@Y*m!yx0y^cmQl}eC;H3D6)5v=q?Pn{CflK% ziL?vSZb+-B@2V)v4^j&uKdk8y_)+F{key@~86#IK%JSAw>n3|pPF%}R!0%Q5j3+cb z2|dMY(wg1}eJ%BMu#Z7nk8}Xi1}$zQV|ItW2kkwz{Jp5{P3}YPi}?L$@2@x$b5geD zK=L5+U`3gC2(CL+^ABUp;n0uJ^dsRP#k`{xXX4&v8OLgI$06=`>L-vVYWXK2|77Z? zkf&<-ry>7z>SyHina@!>i#!`~=j8GW&ZT}Hc|Lgoc_HF2qJ6QVy!T6>UrPNl@^bPD z@=Ee5@@n!L@>=pb@_O$XChN6phS0d4u|!CevWdm4dhEzfHbFzDvGGzE6ID>uk90LvS?s5%rJBPZVW) zKSlm$)ITS`z;$`hztrNsg8wzrFO2yH_P1L8cksX0{2v(iBl#2Zf6nF48VUc`oL}%8 z{NEY-2l*%Ym*QX(IY?35|EvS4<&y;@f<^w|LUIT>lpKb<;j~96%6Qz@;8Dt-#qp8) z80ce>X7QSFu*cJ$KyE=!RFv0ELfmBPQ#8E@`j(nM75+5FO($oNGf6{HlskJXYO`_O z9NKep`Ge;{pHCfYM_eQCr!L5oHE+wWQC$EuhL|ivx0gxVnawZNNv>ouxqvW zI{5XP-vGZ+^PAu|YyJ}WEy^Dp(DYVi&v~7BOA)tB%ijt9a?Q`&<4VTwjQn=myO68M zUCE&0+%SD`@E%AJq+X<`(q%t%!0%*Sm!`+4ttQuy-H7j@9VZitqP)2;Qriu2$(%ko zMLkXSk!umRPK#R)e*^P2lDp^f=V5#X?@4_xa&K}Ua$iN+zWu1}kLwPgeIR)dd9b34 zKSbH{zF^*AW=tPs{zF>+!?^Ad&3}|Jk3oN&_7mijTHI5}e_HdO zf&VPyo+F>n$Kk>-z486-&U0U_739TrT!lI zKKX$b_aWjw%K60+_@5yCg!C!nKg;D0{+wE7eSAs(E5v_I`y29G@;mZ-@(1!qMRC8y z=+nVJQ~!lTAIY}-M*dFzp(ykI#C3luztBVu%H^dV@8sr$k8O`cHvms1u0;yucix>wm-oz8rWUEy}Lx7Xr!fWD*V`{0)`uAHnOE6FNF zSx+^!8k1>}K3=sbrw(an=GT)A&>Cqsk&I)QP zabHK!Zbw-?NV{lptC+Vd^dRjJ873o&vfL=*I;eM&U0QyO+G@nD(e!R*FZzx7aWa9p zUfR1U%6gK}Q`FOBAGwxXN3K_t*KMG-k=z~S?4j(%Y+}NZ7b5;5+82|TD9Zc4 z6#18F{^g9h0{WG-uOhD|uOY7`uTzxeT~F->Tz{joOV}Qne>3!3kRC<4m9e*xw=2rH zJCJ{;=HCVXZq2`k*W63qhx|>n?C>2bzCK|V=7 
zg}m31p3YrY_ze7KHUByI&oll7@&xix7aMV+E8HquTy zNT;IZCPfUH_TPu)!Bo7)7qEL=cDt=Qoal?ElN-v~0#%VjPcRk_gk$LqiH=}A7)~V9 z(Udun?5v0fd!kFqkzph{3|}UzIv!20OC;AUv)Roai`nfqTS}Dav6^j8XGMBLZ?v{E zu{N5F_eA6A4~;>n*j8$@I~~U8va%MVXjx~hGl*(a(VpI9G!;yl!-*bqC}}v_$5#8R zeMW=NA26!?Wi39VwX(jlys4qB(O>Q>Z>kI!0kh9+6xaK!+bS0tbt~;|RBAEXEEe2H z){%E&tSOJBdQpEh8t$^#t*E}T+9;~;iKWsdXojd3bq;Ot2U-k&yd#n938rI-xDkjZ z*T%xplo5<5SawYccUbB%TRrTBj4eh1rOgVf(rgxwRdjb5y1N!Vo$5<= zm_wb0V@YE)T@nx;RHHr}G2;f^F0<8!He1D9$~vebdxMxkR=d;XwxCRZqfyitj3ql_ zp=c8KX^zDW$8ujRStSGm6^QAXGMd-Q3XHmTlhtB3TV18-hEmj!^ZwokF3eKYP%df+ zcg5mC(QZu5FlyMjTnI*+RE$7ZG`=w!H_!@ETc#2lI>sueqNqf9oFB5IWjk!8rJ`^A z6-H4*D7Y?|>h3dlq8Smx(N-3W1mi|?G~OvPVD!Yg`?95DQrK)RHo-`rm4CnB7=IR+}uE4(>lYq*cCv$8Hhp zx26sYsH-zv5{sw1RoU$YHPz)tb$22Z?Ed>~sgL!<($R>h#3~9z4WbU(s6ZR!N?*Wb zL(hmgSc5z7Hly)apMxqA6`5h(Y?+p0d%=jKY`e|u#1N|n&_PG33B)bHX>(a!YPB?` zu{iqT332~&-)R%81%lC>#Ksj#B*oMW%97n?J0^}(P8{qn($S|Ty2j?Tims_`Hj3)v z>2$AH>E_kFhGS)8Ypc;*{pSLR~JvL4T=p#Jwr^5O-p@^ zm977LcGS04_{5;PTxLA1tPZguW*nzJsG$CyTJ<7Ag1G*81KBXL3u{XoHQq} zg3udo?5mg?>N%A2^hG2VzsKbg+iY1&8|HXAm<}e*$v({S>At>nBA)0;^rej6zEF28 zY{cSto}~Na1F0s~*=3|NYj=au8^oUDZ!M8TgGB9ETUf+sP{z^g#v{;a#UP>Tib}L6 zoQS7`A-MnyM}4{SjOw0HO|~^+BgXd50)^+obj@xF;-PAHixpMY=EI&KmQ-gl(bsG4 z3!*{81AU=XED{T1DpiVQ+MC4lUOts%R7u;KBn}=?F&rM;shiIy&5?Cwo=*->smEiC zp1sZ}YD)F>Mw5Kp%BM)JkcwoaS+JH|*vUar4Lz&=;1l!D>6FjCMzNl3>0tL7(f;^G!?8?ynJ0+9IlI}5 z7TC>BY~+9_I_qrK0&GK~C&X^%$MZTF75lp3XpLdZie?JSJ??CH+*z?r;y@tEq{HQ( zIl(w^f)Qscl-aUdIu*gVsNv=o+@8OrFW4;($Js~fhU~dGXKR)((r;i&}KQ z1OIvdTTnQN(Go{NM_XlgEEO~YLd9`d95{`p4X9H! 
zveXs!+8oYsu2TB{UF$49>};M=x7f%p6QQU${L4e4Y-{_FTCDPvQI(8t(9h&J z3xsmh2X``6w%!^FTf7d7CEMRpaQ#PLvj^mW7Z*(AZ4?*GF&xDORyNP>4cYZFmETX1 za%cP>MRtT7-f;Fd?Ox&kS7+(t^shyVrrW(%o89AeYh_9y^uL?_-}jj*)E)g>p%DIm z3-vfR?@+4{2GXP0sjxGMC;z+%R;4(vgo5!k@;uiVfL8`jo>;_#f2s{L!HaF*m2E;+ z7*G?2EK(H}5Ak3|3=xZrWxB(*?D)wtg)oo-6o*Ea*t|pos&ODy+hO}4aT>+8qQA{; zeyE!yTjQ|WqhgtfN~LWdzyPCCkH;#WBdyCsw?(57c_K@%!(pJXF}ltWr}aLZ2E>cD zxC^;);(pB7E#-TlY`9hQV@T^v3jK6uc0+Z$K~3e@TycBFP!lf-xv*sFd?3 zcbJ}xzIJ=f(L@4%;rz zPdK};XN7*Y3_0N9VDAvyBF?9e;V>47r_6YM$qy6lgK!+ew+->XQG&^jBQaj^)e*m< zF~Id6^hO;Hy%Q+((+TRaGGZ51aq$RD?&1*}b7gjPgf?qZR0soUk~rPFaX4YWhJvw3 zAGeVyWzpDb@j_T8M6rOxqQV!%XgnNMU(9kZD*Eyky9;{QZj~p8tliJ<;*Uo9)VC*o zTC#i4lXAEl!uZCBw?0Iu*~cMojIK``)mZd=TlI_g4+I+Jl`S|J+t5GiUcy$VTkUjV zoSJ9^8@pVrefzdf)qRR=j zq`b!8C_W{Wl=;f*%J2$=78GF`N4>$IMccBJ-_MxH9SgO(tayo*Xa3r?v0y65=X2fC zSTGPwFIFGrFhfi6{4I4`Wy`X5KP}7F;So;+@m^7!e?Iy?|61Air8valtE7CqXYGL$ zSv=;;|0>Sm42r&&#R>cWTHI7sM9^l-&Nf-55c-*InP-9BgGUM9ryAp_l1}+e(!4Q& z?+lLS=B`*W+T7K>!KjF)Vx4jH$Yj*%^18x!1Ul`i6*>36)-6ACSBP&!s#6-03A`$$ zu*-@Fh+DcG8l`m9wYRX7O54Ymn#Yod54{Ykcmi6)mQZ-6B3x`|!hp z{EnHKutEHOvb0srV8fT}#Bob}u$I4VIaL*Jnj)zT0HJ0OsH`m zqcNF6u@*Kj6vF?x&`Ih}T^$}VAo3=KJ&6SgFG%zKV%ESQ1HM73DTz z`I}%~MWESe32b&3n-1P|BECY254!3#%x!Ua@lh!E%9|DbYZ>e3S$P{yn_KLexQ%A~ zDvGru&K9CS#9g#hG&SH^V2BTE{7sU5k9s36_C4)>`aZK=;TJc|099pcZxU6>-`T=J z?T3lx793WUzUksk=O_5Xgj{+KyfWBxXI9Z-DfBb{a@CjGGc69_19uAFM&yql*+#dI z!EcAnz&E1osY?D-Qesp#H)kh=y|lxjZ3(z9Aq@P!9Hrvh8%Cl@ zeA>rn;@m*APpBzr@RzqV8NNWkA84&?Ecg42%0P1^Snprq!=kQiF#L^GO)U*ZeMPHS zBVwb%O}O!uCG$#`6$Uy#T-H+Yo+0k4p}RwyrC75g>N{|TQa`w=ACFZZHS~3-@e3Hv z{6-}nKHahGy||+Ih^8+YDGX%E;1OWA$uGA4R;(D_NDLoTV?njc$zQWDn+(4cv1Rp& zAI(KK1o3^)7Q$F$s?G`nsakx+b=l;vBGqNsUK72db?FVL*4(!SKO{6!5VJH}A0FVF z*C&O6)F+#2#qLyAC4NT73v;}aA9w4Tq?6l@MMJ_Nyjx_RL|I`V(+)M2x*g&N`!a0C z;&ZzAW?;tmKS!+?SU5)6QZ~uvwYqIqts)8osR&(Y^EkziZ)E}0)DTKWBlx})v8kb} zFU$CbuPj@g&8~fN3|eenDGa1K*)Fla%dejeqFo*G+p7BVnJ?O=1iUce&mZC&)Q>=T zS5BClYSiZ_QKQ{r%e|gvg@Ftr_E!9YVX@)c(1LC7pz99eaE-72P4&J8qqiGBcx8V^ 
z6Y~=vy+Tn8C7&i)VW7*@;=o^CpuJ*(Hm&RJ5g+i@iLcCJK{%S1H8&XIE1r-zODCaL zr`LvW7~;T@wfot`v7)xji=n%Jw*yydlU8Zm83j8x{&Nj7V!(~B*j zRBI@OfizUSo_k6i;+)f1kI!wbEzyoqg6Asx;_!$nTt-hoYz!64mI4&<9GkSFt z`l&Aal(IT44)LQ|c?~8{B81-UG8|P+Wv%Kb2{C6}UJri6(z=pDKV6w=k=0ge7hNg# zQ$N03u8W4uG5jRa5U8wNR#_(JXSe7*F;iUL4jcZFGllj5XNtq^aEjj{d|3DXSZ{Ys z{PhavWUaq>C(M$n73EFrGLILXoO@ISEffaW9*4~?esrJfS4TShE*MHC*5EHC91WNf z(e;~s>lB5Df)1zlM4-^m0Lk9N{({d$nSIkANu}fBX&_cwStWikY^pc3S7*_G__iCu zc~$lw?E&^5Ue(-U;)xkwkH1Pt$$^rUEw2~uvS{eP=6uv^b%ymVn?gT(Xl8~uU3RB< z=@LI7HLZ=|iz0qNk4M94xs5K-mCTRmMQCe>*XDL=(^C2UjG~fyn3ZC!p`x|Xxcn$9TGl=|Abg`D)+r86V#dfn;mZEepZaYT`-;`-4O+1d zvnPs-Fwn+g=Mr5kjudslltZ}c$IjMKUNCbb6Db9viAmRjr3gw7q z?0y@4`4e9?1lKpj;t|oApR5ew?IpQkmHLauRXxGgiR3EzK_U@9H}CJcv3M@`oVrk7 ztUIy_e+LxqD#+^`yKILB@!NL@%WIYR%g&{-Uh&ru2L7IORVvui+Z{Fc^h82+k!UKM zjN$l~h`&=7jsHhjZLBwYABBEAnL7J|ozc{w0$frXPeh`t9*Ki7#=<$f{Iq JWAt3({{cz751{}6 literal 25210 zcmbW71$Y}rw1s5~wGFi4w9Ph66UT{FF}P@s46+m1!Mdp|t!>4!5JLAF>d1s2Q5jVu;RAz1`-OZdg~OUM!ANOBZ8 znjAxJKyFB)Zdw0WavV9HoIq|&ZbD8ZCy|rMDdbdg8abVuL2gQJMs7~dBn@&4au$he zm*+d1+=`q-&LvC9d1M(mpIktelU8zT(ni`z2k9hTq?`1RUeZVU$qI5Ca$9mca(i+I za!0a~tRkz)8nTwGBkRe9WPofS8_6cJnGBLGWGlIdTug?@HnN>uLM|njk;};yYYw1Ke2PJ~CjeQ*T7#(KRd4np`F0|^{{tCIe`A|u=mi8-;?@YAzV{vpb?97-NW9!?%X9!VZW9*ugAQMPRFSn9`-$20ad@=m075_vLX z8zFWI%Bh-v8vN5W{|p{;CiJs3{cMY6RCYd{i+L~^@$*ogLpdMiZj=j9uH*3+k{3a{ znD!;)rHb-8E<^ls&A);{{-CBmMEzmtkI;Tpi$4baaq3TyPm)h* zc~2wn8S2mG_0jXFJx{)Xycbb^M!|VoK1O+&u~+iPSzd+yn&!U_{|(K56aHJ8|2F)0 zH2+=r?`i(~%C>v}{X>+Gls_8lOcdE?pJ@4?Qu~bj9LIb?`%CgGMR}~4*WYOVx6Jtt z`uDVd(BeNr|0(Z}Mju#y!Mq*8++X4UM*Da25Asj)FGbOQ0n#O56clQD5&UAtO7i-c za_A$ekIL%>qoI${{0*41At~A#OM9G_H=f!A#5bnB2|1CRq$tjFOh59bpe#h0n%4`a z!Jm$@fXB>$y{Q)8jQZx#XVNyb_!iJ-QQuP2XH(k>@j0~TYVlI&oT~+8u;;7T7_1jj z#5v3U75!$_{H@{JP_{#{GsmIDozPvH?`Dn%x>wVE@co)!!MtstZ>#JLvVF3>mbU}* zc0^t!?JBZb%d0`WR`ctaQ?Izeh4cf+YtZr<;WweQp)@lmNVaHst<)Bgi^-71vSGIG z+NmuemnzDBT}Ev=+F7CLD{*`eV;zXSg0eHpF(|80dKudVc9?djc6ok85{(8;d 
zjmPXx?t%E8%HC+QmbW+beVDVarte2>fARqGK=L5+U>tu4?L*1K6y^0CPVETtNb)H1 zXp3c|>^L2Zy6#0e4&^44<54a^If1z+k|&WTlcy-k_D)6qX_|jJbIyQ%rly}o{cQ3a z@?7MdNBew5QSU}~QOnNvi;#D5K3;GM{7W_eGUi`SUO{4B6kMg{T}}NO@>=pb@_OrmpOK#F#a|94f!qk9r-%&$SbR@3XK*F#@OJD|lIs5K(qr0fZ& zQwwT&E!0}cMdV^KgyY+2w=2qiSOR@1^<|pA9Qq2)UkQIF=5>%eYw=Zx@1pr(_}Tdr zVSbb}$u7kSTt|gH)MMmo)UlR!FByl{M?0Y?uOmsVAMssjr?fcco6O6Q1ISyGk8ivl z{<^$hILJJq??!ufExre}JrUna)Ay#n5A=O$@0X8n{0Z~}s2@lkL>`>aD?9{whoYRo z*kP~_r+ox@BzY8hG3#WD8^&(Qoc znRAxKvdKf5eh%`^W$ZlieDVUs?n1ec{zb5_L%CS_^4u-_+u7QG1(whkO_L?_Dft=lmm>djup0aVoCSW#_*dlDit^ZR5dW6?cgUYa`+Mj=z_)-uYWY7g{xkGn zH2qijzcKbZ`G*$&6Y;2Ihg>k)O0r9O@vPfj2=R+RahAb+CrCm+W6WO9m@H?vV`8&d|)ch)CPrZvV6UO>|u zs5g>Lh&OBTAp91_TFFJ^VlqUwk?rIX#VM!JUxvEwLs^b;4z(4qSE9t}?*zMp_Ri!g zau-F}FJa_$YJP+{QPM=bi*`5JqbQG$A-KSqX@iny9lIs*@{vhJ(sqd!gyIU+%FQmRFat@`v7wo;YynW#BtNHseZ-3|qX!?QF z4}yNMrXQm0X=2`By%wE_a<~>h0{)Sjf0VL|jwX*m{8%l19QEU&pODw5;T(%j%KJqp zGw&4Wr)qk3j-1Z;8Hk@r`z-Qo#c40nKUY!q=Xuo5$8i^+T#u6N$BVSQiy$rzE9Tsw#c!l`6Y_4ReG7Rjc^i4VqOA80YIl-% z;rP3?_&xCN&HK}Ftwr}U_kp}#^dPl|5Pz8VBjlsxW8~wCvi&EhJxM-AK8<>w(ej?9 z{v7!{;xA}%jMa>6zr4(xSIAe%*T~n&H^?{f9yf{hTd4PKly6bqq4qBM9{E1`0r?^M z5&1Fs3Hd4cnWDVzTZcD~f~ui$^p*f)872IgJScR0^t@cX=8^aK1KHUB3b`!n=k zH2qijzcKbZ`3L!@miL#k(Tt_IfOcU%UR-3cY&t{pOPDi)97&EMN28uGv^P)`*I&FL z3MlQdnm!Ktc+H;xe`C$xgvU(8d2kIEPex7?$`q6el&L85m^TgfbnSSad-0}>Z-)5h zv}bC02J|hc&my-ZXOo<7#dGM-Rg~9MYO!plw_Aq#=4a){=E(J-Lt!kPT#` zqNs0k%%kFFlrUpKvISaeKCgHY{Kbrg$TqT_T%st?b1CwdQD083AXkz*ksajD%BcUIa_h({kijT?r#mDlPI~n>ZTHdMD zPa{uPocSXCGm(FmvgP@l4gVa*&einusGX1a1+*_DFCs5il=WPKyi2KHMqW-{L0(B- zrD)8je+_vpc^!E@>U;*}2Ib3kZiIgm<2S>8fc7o0Z>4=3c{_QBqCEaiYIh<3Zrb;d z_iB0fA%4H|4fdft{z2wGL_UnXN3`P}h5s01kCRW3Pm)h5%KDzRShm=d`m^M7sN)@! z=e0bn1DT&4$Cv28OumBrSGD}tsJ{;V4cc##Z)tgND|-u!P4T;ozem0g?JJZIwEPd@ zf5iC5YtNekY6g!nnC|-@*72Yec$5v?==5==KMhZi1<&mf7bGTf&MG? 
z-xOzYofZE<{ZH~Q#S#lyfPJRdVks#^!LcPpD43Ha#VBnkNGzFyGJ^Rd$x+az&>l^W zAvYj5M9xOE$CBg7@rvTQOD0g;81^QbK9Tw)aEvU~TwM93Sm6*z=Vw+gkv?oG~l8wHCJ_Zl~_h zbSE_z=_Wm-S8+?swGuzI3fT21+tA;Z+>YFy+(A*E-;UHOaa^NVGx?e=Oj`8bZKY(%r{TmhKxZXtVX7U!~-Aem5@^(d;e+S}sQooD5 zTZ`XA?Ox>FNBe%oxgTnI4>I-;@*dXoN2osv{W02)Yw;(bKS}*5@@euJE$>-s&msSL zO@BezrC4hvFQI(Gyq96WLi<(nHS%>udHru7?@j7&k#CdlknfW3k?$+Y<3GUhdOv&w z|6>&|{f7CUlAmeEeU7{@H2+KHdan@hbE`aDf9gFm0K1!OsCCATJRq+L;-rvt}3HQ%M|GOX_r9_D*V zAL&<=$5&9>hTN9i4##h=e<3BX6Y^--+=Ka%aR>(cXm& zlbwq4JR-=8Qa8yiE#8fI5A~R)uU7W_PY{nY-Us_0lmz`G?0snW!`@ZdvfUK?wB~2v z4={HPxmJs>qc(`V^|W^*cPIByl=bgPZ7<}buY@Mb0`$pQ`&}H5N{D(FF5%`a4{$o7maqAAK?6dF5L% z7p3+h`4agu`3m_e`I@3U=5=askZ+=1%q~By$P`*6oL--$Q z{>RJ}>-|$r|BU+Q(7&Mlr567R`qz1X>v8bEW!`t>_xbpU9}xdh^M8W>GxL5Sf7Rl@ zA^yAO|Do)y_l5o!bqiUL&l_0?yNG@9z#ggjqhODwKSt9xfW4vew|+qp&1wIA zSRGC$>MKJ|hu7z=k0b{Ynbcrad#Em&>TmJwL$@>~5lEs-y+ z+9jaGqpYR=A4*`U$7^*stgD1dtg${?;>r@qD z^wx=vGs>1$C)XL($-cgUL@W}{#FB}$93vyJ%;Uq*s8gi*&l+7GpWDG2V|@`-V{4$f zy1cpvwRI;K7)?#pxLB{t>acsQeq3$Nl!LIab0Cd=O{djJxLgiDDy*un!bl_r!ik8f zD($cY+be_BwMKPIb8~wzP+i#;XbFalra*I`t+oau?RQu)!gj9{ZDehyQZf$7_1Z!K2P`}k!ZzQcrD@N5X<|c~GF6zuXs!}nk z=vBAV=k};R2qadQ$JVIWal0|KPE6<2 zfKj%xr9Xo{j2qSA&bVm=)sjhz35RoA>UUdR9;@FgYR#H|KQEuhAtrUG9@n}i8I73< zc4hGr>0y{NnAw<3p=5kOjeKjyR5QB6BCgtQvsoST>hm5Jl-{SfHjhs%%;x3B)K*NQ zHD)Ruj*F(OeS=s+o3v*&OtVD7jHs6MUBM`Ao@a!j39C`nBQ9>G-R87<{1{BpX`1to zmYpu24J}uq+gfAdV!BhYsBAfm4(zbF>kVuNOIsE<8ZE80i#cUl+m;(mZJ08$8N1DA z#dJ~4NQc{loHSzJ@!G|dX=pX3)+aI<_F+M=t?i%7raGDGPp0G=!noV97@cbM(8SsQ zW8B?7zaMok!M;~>k*Sn{%Ob9R0N(X~#eP0r9- zj30LMSQwM0Kb#m;ecak6B+u@m%SU+-1-DXa|Ui6%W*1H6lr=Nt@$z6xA6 zo7;h*6q|KJEYXz;$Hi3`WlaI)a3e%*=n}CliET=A^0lE~d^Rr!w7D-9NhQR6idbK#ob3#OB;1+XDRL|F27u=ZHsYBNe}d!sieHCsT~V9yS!us;$|k6 zj`bM}W9$3OOx#Sxj7n=ru1+szq0?=}rHICqgSPX7fNSu(-C_t@8;z-r{h5fmr}p<^ zVivZBd&REP6zhv+%&5_fbqintCymy&>L&EL1MA<87Tn^7p*eqR0sD?v{Q>Od%@H#m z7duWQX^mk!?-*IF1fwpt4s|z)TQ??B{w8NMSzC?33Wp6hO(!OkY)9Fi?AVIwge!5| zY~0^l#3mDu?K)s4I3A0{9!g~aE+K;cS?<7=;==`q?#bJK8)28viQW;<13~#LBW}KY 
zjOi$l&oPF$$+a{axREBpc8}kv3P*Z7F?&V#IB;jT^Q>v-2b-tu!i`Baf!-0@yx14y zgHp#>S*cN*UEfAH5j7T@X)~OP^knZeMm4U|Oo{6eI~i_);&POkYiz5p)J@Rg@`)*1 z6%>P!NUlky&D0v+x`RRZA>kWU)+Okmj)@MN&0gX6L@O%LK{TCRU0oF@|N0N_;bJsg z9&xJ-V9%&CV<~IQ#O6~QOQHL6Gb-CcXQwmUf-r|_A-jFJ>^_$m@wdgbn3*oz(vzuf z`9QcbDE(|7rHz*Uh|#(RlUY0lc1)936YX+Txa}@mw(Xq%e_S00&4JBabYly)5lk#I zkxW?=*ha)&C>$e*Gck?I5E`2y8$)+^E3p0KT9bnJKl)L2FglaX`E5?onRVC?7A4F9 zYzGN*9UcL(j*|V@yn>pRZ?V(q`Fo2(7^+3_2!`Rqtq@!O?2|C@aee7bN^ZqgaldZC zH3~r+#>w&$eQrz<&J<}6XCO<9D^)wC#X+O2w0KqS{CyseyTYgSm=rpP*JDnnLu@)V zn0|7Ada-Jwn1Z1ib0CwB^q9Dd#7(I3CMajRv%+rgQs=MCVV!uOBzU*uy#b|D!suOD+jnov{DE)$xk`QBQ78 z$vTBFoJl1fxBNc8OYB(0J$nWrcZ96IeR!g zDqjEGKCy?2^Qm7QOY#&fi^a36c-n5j2Ln6Ck2xh2hLJ@r32G z@dm#T?`b~I-|p?r$<7$w8vpjplQ!y8$$@@|Ry>Z%*GM_z{S`iU#FbnB;>4sdto1+o zaP^Q;I{FmM_|fNc6`_i7r#`1B{MQKoZKL|{IwPH(f9o%w5QfuVrTvf2c$3tA8KN8FE^GC;M^>o#0W>U$xInak!uj(EX5Axz&UVlZDTSK@a z;`Pc0A(xHzP$$5z8Fsw=+Hf-xQ?DVKz=xq&M18d?5O414f1_Ny%wQ@StplC$Sj4Eo zM-#Qtg)7X6J*sslg`v8WAI%(2zg@JB%P%ds8PD9j?x49YW7K0wX5WOxQzRCsQC+(j zZ{rTO9;t}fO;ek66o!~|@`1Bfe9Gb5Rv-i)YK!D9JSPls z;qs9U)6VJl+C6+bY>&peW0`PV?0xC}aKucj?I%>*AB(j28w->1D88S{4|=lOMPpqN zfBx|&Ck)kkcF?ie<2zWg=>g3It_P;6`2?}1oc zqJgL|MA=@>+AkaC*9Qd^H;_9lr=S zs_~ujYGfUq?#|r9Pfi%ljS9o(@VI>rF?{lRqU!6XSP@onJ@`D(h2KW-nJk41S!`_* zmonMm!!mS5(1&;+%-KUtOs)cl$LW$Ei>uIqt$pH4O7_vm+KDEYH#cU#dA8Ly)rwCD z_}E!#G~?G0tU@2wgxz8HA>#7aBl7?E^S5&Iu&W^R) z;m+S{hsAxP=1YAr)>(Y^-V2Rl}*(x!C-B5TlQ;27z@;H+BzG}nzb5Vb~>}S)q<)N?k4G;>=ep>!Al#h;-@7nC((|>=8-RlIeR#Li%WES z?D!Bj>k;ho_?VjLHs#MjJGDsLXidh`z1V~BQxQ~A>BIShP zOe8eybl{sF-kI7kq}&F@tvvf_x;?uS2Be7Hqd&V5ik9$AH2;d46Na@BPHJGYrIVIn zBb*e^U5~?O&p)xtN`)|->%pDGg`Zi)J{ZCcvmxCtf2OpiamVbi)DOhF#9gd^K<X%97d_%vj9tB>5``&nJOD`oA%2HY_&l*BJB_#v$hlPD67rA?z| zz`&buEGY)Mzejw;EjL0q3Z11UlcU1tvgdDY;zXn{tP!0u7X2>Hp>)b-7||(nv`W2r zsp0)nRSIF4y^7yi8|1F8Qe| zEbgwt9?mI<$1lIjC4K=DXRr+OHkw&$rh3ito^W5E8SfmxS7-5YaRu&U3>gb^q3kJ$ z&B%p!IiJ3GDGaCc(G_^AcX{wsHfJfuI+?KJRW)wKye(^MNf>s#(!}EiejLKDn;H2# 
zoa}jgF?8A7;)WK+W2m%;dD(7!!j#WUm|D&Ksc^TMDvxo;54B1ky92&ONAM=8`dM70 z7jI6H{HN!fFw`R9YW3juE56-|-zXOKBvY~VNxYh>2haq5G-_L@wCt@W`)h@0I$Yt6 zM6f1vONqizONmY6_kEZ6ja)QcA9F_4D^p|4StBj{EW#xl7ES2mNMWdP6z!BwuR{}+ z=+n~a?brvVlepZ+Jd9|#V^+Cc9>3y+m^U- zm{|XQ?b?(-)TN0V2iBik{c2lPiS^gol`^v*68T3qt#y#(^COwGgte5H`PjXPNsj1{8Z)B=@v69^Y*uMpYaB%qcE^{N+#L ztMM%nzyD_Mo1qvssZ5M#g9jlKFWI^2Wv4Kl-oO}m@ROJLlb=vEHUs=lD_>{$M>Gp# zviIe0YuM~jGk$24zv<=dVQxA2%Ht8O$Q6jcC)m|0mVBTUmbT8(fU655n_K5uVK@_4 zJOugFyP23zfmnYWzfaX zRSJKodB%rCxB5FCydX_&!nQAdxWiuytj7~-5YO7;7dt$S@vHb`b^Z>AU)&Ks&Bn|vU=BjkKuRm^D`}(4t zjZrflNyYG&W68ujRc7Kp(i&p@*_O^35s2fxX8r%kckLES6%j#9|pb6aEnT zL&;&}aB>7Wk{m^jCdZIt$#LXn_vWxcAD%{l=baOEkkC>9FFVP@&@23DC4J7I|K1EX`iLV&sO$`n-M=3<#v?w zn0G$xt5GhXe<691qOAX7YL}3gl9%DQ%eA~K;9tquRf;1a$vUnfuO+V|uP1LHZ&Z}W z+=PSCW?@VJ7WlW)zD;o?=Z}ovf%u(T{$23z*8F>T?7h(M)AakPKVY$pRC&{PCr^#o?XUXRjWxJoJ_5$j8k@ibk{AFsdApWYdM-8X; zI{60qrlPF(E#$wg`R_32UGhEheewhHL-Hf?W7PABvSq!W!vCz`k7{Jz7vz`ZSIGOi zkZ1V@{ze|9s^(8)-gI&XITLxaPz?IBVb8HxMsEeqB{x?T^-6u7%9S?OtfKh^U!Hp* zG!N@5g}sP&8M&C;lEk^>aZAXhq?O!SQRdjF*-@`U)1B~LjJXv@_iJ%4;y%XxWI4GF zxh=UJxjnf9xg!}+lj8_mBlg!_d}j@HQ7$CQIxT@IR8$X-@%-np|7L8ixv+3} z>_lFdvd65amVkB`ZD>V3D7&L1l_uNM%XkWUns%QS-<4Vh@ht6}7RS9W8lb*G(>FpN z)cj5GcT@hDTbREGxhJ`oqHNdR)b=6w#qs-T@%`Z+p!o+f?;z+0(>_FtAFAxJ!x29m zqKZQA`LO)H@PgnL>jJcvS zHT^93XKVgB$}T#WJP+~nXQDwGSg{EOgU%=jgmekt_JH2-quTmk(`WslX{eKq6P zkk^veA^&>XH;^}yH<33hihdjWGV*Ul`3dDVl=qo)J9!7RJ5gRlxeMhX>UYDwhxWbX zedPV*1LT8>;(X)0)E>t1j}-KxN8vxF`H#bYg1JwUPmxb+dCws4S?bSe`t#IYP#lLb zS@aS$T(fA495=5(f0g!Yn*KVqHxPf5_FG!~ZEEk3?;`KLLVO(Tq7Mpw(TB|Yi2NAw zPqg@_)IWp%xu$;s|4YWc()6#Pe?$FSP5+MC_lW`QtTx0=0>VPtx?s)TdZ1<4b8zLrxjWbow*2{F&5dk+YFEM~ly;zBy?iK2M8p zL47{-1)9DPzIpydN*~|H+{IenmdM+R`Vw-f7Plh4wdUKHV~6gb?Sx%U+eNxb4~cnK z>_eYmUKINk#l4gMHY!Kj+akW5=5JrfE#A>$DOrhl00ncdxB}&PluDG1C{;XHHSA85 zWhkqt)xfT$T}Lig6zwRkM+s7IAREahvRP5)uRwka^^m5wQbRj2L<-}xjrwXF*G_v4 zxt83C>>zhml;>MVZ5JFL*7OMcC}S})t~i11lgD=4;0y!LFk8}Y@)xLqKxfMZ4Yu!TKovakEDJSiE$#^cr??CnaY;+pGEy_=;zQrSBsl{d_LN7C2}qx 
zFC;HglyzOg<1ZyIBQMwLy8?Bf9+5APxeET(ntu)I`+>3R$m_`)$Q#L<$eR^qeYc># zTdChh-cH^@-bvo2D37}v$K6x#C+*0b`=H-X`+-8d_(5t9kq?uPApcR?kCBfn%JV!y z?Md<}@@euJi)9k;iOhcv^*xVust9}m>EX6_hrtQH?fZ8PMJr#*q3s5lwptz;6~1C|2dmdbVcFM%oc~XmKZem*%_SdzkB0oN_6Bzm``{Z5!lotLfWO z-=5q7@g22z0Dgt$SHiEdSf=8BiuRf;Sq7cswWLRMv@;Zam8^}hoiEKvx z3T4au7Wg5}Z&mixLFlV^OdEchO2J(ZZd&b56XJ<)00XQG;MiZz05_srJqKAAMITgrJkXd zC3DE@r#(P!P?Y%_EtYBK+}y@2lziQQx0D0PzEr zJq`UX&wa3>v=2f2P?Y0P4%76*nR^6zB;rS*98Di%OB_G#8ED5)JGP+7x{s%R0(l~N z5_vLt3VAB(I!)Qr4U0vTlG9PnC}^_Yv!LCMayH79JoX&&T=G1`&Zm8WqCECOY8R0g zBkvO0muh*JLBE{(6^hd_=4BmMX?a&uyN0}$ypFsc_1r-FMnzf2P1J5CZ$aLzv~Mfq zO-Emq+)?mL?qtqgIL#69_`nW`w_|;^xssJZFq~? z+vGdQdsmCUNBw>1AJG1=5TAj0UGg#YPYQa;r_??pKS$mdTKr4+Uupi=@W0XgZ{dGe z@MoZZN`An4Hy->E9Ij#|KaoE}|AqFih2u(ogZ{hb|G}I;6^B?*R}t-EvV=rX^wE%^ z5HjvNp5Aa z%;fzZvQ*2nGQKrwBkicqLEA~X6lHtdhfi>8O+M;ME0dJKA;dMDXMc58VF zYU{}!9GBGMy~>^~#%CJkQj|U|zAO9;%4sNBltWQ+TD)J`Lk6H@&J5WHWuq1!WPB63 z8{)fb@jc-0$=F`x-ioskllNp_MQQJc{QXf5K{-Iv4`l8^^GG?hhtG5e;DJ3 zlSiPQBb6=VN5SV@lm5}n!&n(|EbZgS0vgP@&qkcW~8#Mh!_%|_jv!>rd?N-EZqkX%@GS{zclSA%AT+EFJ znR7RJ4|y+Q_bFSp<9_N7D9**akorU9!{j44?onmS_+!)`hyH}7KMDUS&3~GC&nV6{ z`{Oz4&yz2ZFXH%@XunLpLcU7AM!v2n+w%rBjM*V?(SBQTbKEPL^RA+_-=p?E@;}h@ z52=4devJ4hv_B<3Q`|g`*yku;X#STLi_qmczE+g>H`Kl*zr%6gYw;hb{|Nmj+CO8Q z7192M{1x%va1Qgn{!aZ55@U3zr4Sc+LyJ{R+9fJBbO>}Lind987&)9Ap(x@*N2xq% zk5=~3G0?}-9!G9Qj#rfV6R1rjC*iosT6_xisnDls`gH0u6b&3RbQZPQ7RxY+diT!Ne)+Dl|Y8G<`?t0nM*~Ux~7exm9GfqS1j^ z4Yk^WCa-BZVyjW=84r>TWFy%`Hj^t9WgRWlLa3*e_DU_j3VNIJjb|8dC)bc`$(F_0+zt7=)82#JQ&HBt7vg(U--q0n+>hL!JV0@t z*+&OqUZJmt9?bY5A@5|`r)YVnLO+fA>Es#YndDjI+2lFox#W4|`Q!!Uh2%x##pEUArQ~Jg<>VFQ zm5TG8q<^)dy#8ybT}xg^UXSa!LCd=l{!N;HGyGeadnj?ckT>SLD~^H{`d9vYp?tj_=7IaQu&2{3rN7^SEEgUkh<@K6%dH$v=?) 
zr>aZn!z@Z4Rzwz)C5R2t@`h5!0mFtXeb|UXT(ouAC=`sPVWSxzqbOrzp^u}!89APu zfLIyriLfWpo=i?rl=VzSe46G@XU+`hGc|n{{Mn4nA?K2tlLk3YQP#f&wfQ)H0quok zDY-~dm=2c0@eDcm-KWR*}`@GDUe@4YgVvUq^en7O$rkBpb*^&YH6N%oQ{GEMfOj=fNJRlck@!&sKgA>OaW2jFjDY$G{HZqo90L*DMx z_aOIFT=0UHw>M+^Ab(#?-;etK%|vi*l5ei-$`6&IT0=Sb>DY5AB# z!;YqY40$Yh9C0(l~N5_vLt3W+%>+j1ItI(Y_prlQO_3+F$Z`Z?seTKqi3&)575 z;9tnRi!}XW=$B~zrOIB2Ya4bs^RFPUB(EZ`R+R0z2Km=gzmB|~yn(!tyotP7Q67H_ zj=z=qZJK^NwL1{MQ`7IFemC@cXx~fTN8V39Kt4!5L_SPDLOx19rnvAY`cIHg;#?o0 zJVpO$@)`13@;UN(QXjW3QhSMfnS2HHzN&25AFok=oqU6Q6M1iGd2hpihp~4x{XOXK zQ~!YcP_Z;d|6}qK@>B9N@^i2W^?U*D0DejRD{wCSuaz&``wjeW;TM75G4Ffw2l7Xf z{VI?98Tr3Z|CRiW{GI%R{8MqbMX{9keRwf-%*o*;w1<#G$zdc87Uvy40%auaQRHZH z3^|q@M{Y)rCnt~-$w}m7atb+>oJLM3XOJ_=S>$YTj^d(L`kRvmIgb?Avjxh0`U^CD zA?;Fdk*1f?UQBLDZUru(E$UrL+e&Ut+DJR;0G+hOaW2|!&_mlx`bd%IM=96*ZD4P! z`P;$X-eOsFhtfh<(T>$9?ey1>YssBRk=KE;GyQesE@YUDkWrHJdw86FC)q`IlL>M? z*+V8tasFPE6#X>WM~e8av@;ez44CeRq5VDDiJk8(e89|lab4W zV~JFk5zZQQt<6hA<^e`cZ=}{JH7e6qqrRpl70+!*XL{NkR=3CMaJj9vrFqlgcE4v7 z)dWIzkH_IQ#w`!`gwX|sdO^km567T7T!N7f)IfDWkNdqBhXdT4w~LWCU|DR^xV}RW?-(?R>Kw zHowbej5CfZw5BeVi)T_{(KX?uQDI%hYCLF-&7rE1HmWtR&f~CqT*kQi?)YGDZ#bnc zZ*8sej8Hr?5YJ@MUzxsiM%HXZ*5ab<4!_lbtCUx0I;v9KGN<3|b2*H0tzF4*G!f54 zGU;$E5>CZb^($(WY*d)K>NUH|Wi>CCcD~oVULPu~4JWfbs*;W|fn+$_6Gq#5d;4+k zW$Us=XfT_L_hyZnOuE0%yeDhWWQWsj^?Ou{g>ARn*$r}#+3h|j`XL;{KpN=FS~LAd zX)qwXRm~xzsi_GUjuB#a_^b|_xV4&-?@fo>?!#EA@5?5ZMl)&E(ol1#&Zul^Xbv>C z8^OAUy4LC{T(HYyb$L1J3U>a2eQp=-OH*Ga(Hj>71>?>d9YmX&sTj?{N>n3msM}$6 zc+n)SrdYul3sHY*B*tI_q+YpcN76helw8bM!jG#4Wl~uW{ejMr$SLac-TN}7e zx66-#o9yq6bDV^N@qu{Ka2n0wOgxoCO`=I_ajY2eb~Hy;B!_>#x6vqv*J(plAzV{m zKgO(JI5lX+d^Aeew06hEO_dR8AY|UiHjm$G!)TUGG|hbX+g*N#9i0|MWBN0L$wbN= zEX9>d3PSqZh+(T&dhMv$OQQgRNmj(B&swbZC;Pn=ND@eDpV#0j5%}N9*-BD z*f@|*VBRO==tMban^!@=$|M!u5v)aG(AwqYX->W}m)GIP{92wxHzmTVZ7m_`7*+99 zZ#dIq1hx)gy!kvbq&+iVpklHKp|iD6Qoi$n%9x&Ev*y2&VQZfG(p+FM#zh78Os zD^}k~yBKOVj~H>br3E|RBB#^o^J2KA($U05)#b|@h0Xib?#C)*cUrM#;P||)ju$i9 
z=JorrKsI)#v$;%qFr195%jzhuh0-FGzbxQJ^FU54z8Jx>KEKrtvU24VTIcaQFs+t% z4TP=X&J0!`qjY7>ssQd|E^fpY)#0vW;zlTdE?X-WU(5-ybl8^CK~06#$La8Syy*VM zY$BP6;=$H4$hMY*lwyQ>22pn^+n>yd2U}TlYx`m&*ouWjR`0MotWL}aSv{S6V_j~W z=!yDVJSAoZtDYR{j;A)EpcAAn#-N&X<{BylOg}L6(Tdjpj^xAQ6ja#Li$tJef2iIHMi+#)&g}#NF0Cbw=^bc8X_hLn0MRiv=K> zwkB}O6?KhOP5E;yF`8R3tnGHM)r~PJtCA*aVcSJbJ{z9?wRW2glP8LYf5*7W?nEkV zG^8Vmq+Iy)rgBco&2`xDEE7{u4yU|l-UQ=$>;N{!s@Y-A+bQ-o_No#K+ ziB2o3N>>?Je8u(+cc3$!=~cTEvCiZ6MpoPL6!h7xa!x4Ij((Fv2)DuQ^5HfF`!eZ` ziC(ctiCtYiMu&8b##ORuxJ=R0SkuI{YF?pfHjfkAhGuL)mUd>ssh-aMOb**pF&>wv zyThrJ(Uwkjc7-u#Ytr#tcYNu}R6^{j61i~BSY{q#gnNyKFxKrzo85)mqsEW4aIV7b zvirPp(T{f}#JYw~RZU-BT~jBPHL*6}u44c?FjM7nZ#sAyt7eN;+Uc|V#YUw!9gd1; zq_qdrCD_musI0eQw8@19_eR!(4)DmEZF>6nvAcavv9`pqy+~o9mL1tKu3EW9S#?8o zV6hR1!p9|JXKGZ78-#65HlF1kb){WgqE&3aO)KBQcDK`phK90(8C+W!vrpXeta>1< z3@Ozd3D|?n9f)W-S}mpqyMUfs8~JXZ&5fFy;v4a{fCs3k(%O%8JXlR7KSc5H6^|MY zQPUH9%EGeeaEmo5=t`&sqSqQhkG8rJjn%DUVbv!!Zjje&6^~)L+s!-XW3&xKgds7@d$f@hC>Ars4hs1d!w8)ka?4Rd)h*02h5;G0`0 z^*qPmz_kDC8&LkSrK%UZ4ZBA!%&1;@<}DPPQor4fm9PmfX7T(m!x$~?7$2x>sWxyU z+nQSHLwHyPjfUzfY~x#60*%#8Mkr{kY^-apZV5Ft27+d9Ih?o}j2aATaXRUl_e}H_ z7BxFMz9oUxE1T%;OUC6gB*)DPv7}KHqcDsO6E4D!hZOeFb~O<4o>^-ZcB*0+WRu+G zx8ije+x191BRVJF9qn?xby_`g#)-8Tws|APJA)fd!`3T;v5c3PELO9+3TYdG3e^zx zNa4bW8pH#}?l%Xp+y~$}fVW+=Cc&Il3^HfV(iIt;Q537lWZ>*Md4`E7@Hh7 zCP8yoG93vgv6)WAqd7T-n`@+mU9^zoP*zRG;h3S~^3i1FiiRA{%y?Bqu_!gaGdfG~XuphnC+KYF1ym0^hJ5Pg>_3u2g zjdq{Uil-cw{3DHV^G`HN=btM&(qY5%8V^1PdIw*c@*X-g|M>EHJmM*ieR<*R z;-lJ5O<0;+TQOs>PPYc?8XKw`TMM6G);jP~Cf z&FE$H|Nc-giod3f=2i%qEJsK$%?DqjW~qv8cXe8R#sm9GRN)GGmY zija(ocw)WS*U5Jmys4SH9o%pSrWziR>Zz7@%uSA13Y>^x9chR~b4#PibU#1A?Nl|0 z52yHGS}EW8MD_V^nS*A<*fhyoFDn*~d6&?1yT^?cCYalRHi{+An%jWMF{L$}*bq)( zzZZ_hMfHu`qL;M>U@pc${qL=MyR2V)%t0@S3&KFlFTohJSTw{ZtlI9LNJ_1y9YbpC zD;l+(q*3_7n}5n-_=p|9_)LedeX45ZnZpN{;rF=kkXqh@+lD8Y=slg(bb)kA6c=>Y;L26j#r;*#DK@q zV)<0x;PPI6EkJMJ&BcSBU!F-vd(d9A8DqT#4l4fJm^23>>&1sD@g+{RQGBK!1jj)yobL`!@OEP&ArOew_Op4os!mXgbM{ 
zXB{Ien}V(SkmCzFzuOk}Dr+2gpO#M)*>GjMv}x+c8!YC3wx=-ta}&JGHq_= z9d$Y6d*}0mFJcLukw4I9^SJbir2qd_Q`F&(=`}?i9^3zFj?~a5xePg-Vi{T< z!|H&3kUx8bs@ejLMrdVob5l!e$Y@rdv)JTVr>ETM^V!T3<^6wYvN(~;Z}W-|ECVUA zFb#+&_7Hr)Z@>pgY&UBgn}SU>?f?BXzGD*J#cbvH3MbE)xBvCRFl#(czX#V9j9|{l z#YugOnp}Yw{v_7GY`4)8&&I=q20OLgO7+^eZ*f}_0CjdtLM z=k0&GV6%JC3KzZwV@;@EAKtV{j52Fq58geS8!Cgw*1@VeqpsO##j8rF4j-sGW}#|V zxf466a(mP~S6=wvBTG9MHb~;NB{&%FPUAy!RE_GErf{y?s7)uu&u;k9P3}iJX5v(y zatuM7DrQ$#ML*u@asJz&5i63{Bc@N;n2+VQOMWHeBeA9*{ky3<-EZL0BfkNOq3*Kz z%6%?(xZGoB7v}x{IbY1HDx35NPg6E|EPDt?`rV%LutN<&Sv7@!txJwUo6~{Y(IlVi z__82B&C3V-gv#2w#(?+?Dt6$@`qwA0XyYT)06H=bUtW^CM!TwsxT-kDrC;@)GQI!W z1S~uryGQJ^@trBLDV>V+XYuxyT9KT7e83m)3DL9|vYpAnNZNc=td!ES_sh!j_CHl@4qdT@!5fd* z0B7)VO$@dk%zyl3TiP+VD$}3M4Wx}tVS@o|GJEig?E1LTB+^aUh^5omW~RIFv-#3B zaeS+iEehjG@h&Y}l(+w1EfTlTAs2h>7zXgOLA2Z2jW57!>gXADm3ZY>-{Z4JeP>Qw z{v?MuL9A2Fw!HnX&$@ZD4f*r;ye(0|EL7kqcX&Fzis!K_}t`028Jj zAD4Ru#UrXye7tKAUYpp?X0!O^i9dhiR)uYy<HBhZHr5Td{Aj`B!6>MFNmiBYF02>(1wFMa&WhVVx5Q-~PHAFstnqHtz# zo!Zu~>kY3@XV%H@+v(KV#ea_`QibSQ^^yKWGPW+>myUK1DefBIwnIZKoC`i;29bph$KWOKi|7v~hmb?b zVdQ4yaB>7$OpYW+k)z2mx=9b|C4Hoy43I%GM25*SavO46ayxQ+atCrp zGD2bw$azpsR*;n>=7h|zBCE+7vX-nPqhvkVKsJ(1wo zYsr5-iD=sIpT;TJ%`kebS097w2`qLusfA4`=X0_0{S{lUr)Uo zdXJ_jsi&a#(%x0ePgBbvKdb3|)cc`t$m>JVXO01+!;m&HW;b$oau0G(McMAX5WhF| zeKdVvYWtDM3Hr&@Pa#hwPt)Q~hy5+}Gd2Azls}vLIpn!o{&~nhpE~YE-V^T2aUr#f zU|+21m%zVN^Dl#cxx+C`l(|At)FJ(=pxw)Qt|qS`uO+WT%=NTyAa5jZQj~piGvaQc zek*yKmVZ0)@1TAsc^7#%d5_{SdwlMren0tuqP*UN)E*)qCLh6d9;N*l`8fH6qAdR; z;+~@ZH2UKM+Rs3LHt!4F@jTK;NH1vRUu67C&|jwg3i&Genxeek>xg?p^WS95ThQOu z^mnMgOTLHv_i3|@vc3-;j?MhkKSs>&NS`QQ=6?$RGtK{;v0sp1BL6GeUz6V`%JTMH z{SI;8GyezjM=kCrYCj|H7o^{iens23#szI#*82zY|J2I=B_I2ag3(GVD1bjGpI=Z& zy$JeXO&>yiC^?MW3~|G0k06T`#q|qDQX8eX8OE<*47IW3IC8wAEH{DLM8r>`Jz2}& z9QqXMQ^{#sehKoY=l$VZz@LeP{wOez&P1Ap)P^)0Nwjqil8<%GCFhaz$pz#>#G+2o zR@vSy;crEI5m`z)$*mP-ybJMe>K;w^Ivm53)H&Z|e1KYz4556OcA27#+lJb<6-h+C%l%URC~=qok775*ybttNNU^4Cz?8F6bheHZv<-XDQJDTpH-fYgrE zhh*jR3p(I;YJL~|gyye) 
zn5G{N{Rkv+Z%1nSQSgsu{4wOQ7n}_J6r?klcPe=rdAedT zVnmW{KMV1X(LNjYIY_tDKNt3SNLSE5ANB>xmSr!bei8JGXuMZ%lji;jJX5)oq4_BF6wua_mKA@ z?mpW0E6REwfc~K7KLr0_#yz6xk2)M9F($IiQQxEQqp#HbC5)>=Ts7?) zEx#6eo#sa!jxm@^vL73ew*jdUDNd~k_EOr-TDchX7V68$X!AlL&w_5`5o{(HNT5-3FzxIeLehc=Jk+CGNr}! zB5qgeq77Nv8O1R#LhGZ}pVwsDHc}rz%ptTlk-L$*lY1a$Pi4!pdr{vT`aZPx)$;eF zwm*3Q;tr&JkQR5a!!dRk^+PrNFlvXBN03L7N0CRP-eYJVt0>!d9JS+7zkN?9Ivj;> zBK{<#zmQI5%qg(HL^>7eW~9@opH7|u?J}e@>7PZOtthW|4z+X1^ALYN?F+~Y6=nQI z)GkKcB}kVle=M#ql8BLUmm}{A+E;4&RnV`devPJI3;jCHzn(ESK);dpO^RdxpnnVU z-axt)=>=-H!M>gL9k3s!eJAX@lr7tLH~f2;cQ1J#dA}C-0JR4Z|B$lB)ob~WF#l2V zG4gT5KcU4vN&PAEY4RDwJxlvJ#c^VspLaL}bNwmS!%Ld~GRwR|zKZ^AYrqX@5d~N`9s&`}cEdUpO4&xqhVn74@&l zZ&2U2w7(<2Cx0M+B!41*CUI}F&R-R!{Tu50UGwd^@~6_q^ZpARSW^W^Gm!?Vyh4bg zj>00Ou}FjS^%M?a-caaV_l28j`NNq%0{O+7J`(;Y%^wYajPfVAHGLfP@yaipKu#nl zk(0^I$tjAm%~Mf+8ub!#x|Tmf*%SH^XCNJgG)v2$4Sx>OzDRSCwn3VQk zZIMUCmWF8 zn9ra1IQ*q~zpxp8O!HeAx*Lk z@o{C#_P0~F$PUFxh%fA-mVmtxX&q88we_&Ol`ZS(fuCevDz8tvj@qtd8gZF?eqok+ zA9T!FQJ1W5gTpbIYfbtC$lt`g-L$yfsqI1Ti8zjxEVnoOeN^6L_Iu%ejNP9+fIN^q zh&)(P)^iBzIh6Wg?@vA%{?SMeARU8r6U!V6`#7XC=pPUJgnYTe6XBnv z`KU);{}l36@-*^v#mPUR+?hz1Qa=m!*~*sX&w+og=AQ@ue9gZA{)L)<5&Vla{}N?y zZm*fklwWu`c?EeTc@=pzc@65jmbN`7uBU&4qHO1l4#(#9J>RV9w=nir@;35z@(%J& z@-FgjT=yQ@_mcOK_bX1BK>tBS*^Y;(J&f{?X!@hnA0x&3euDOsuwSJ86!|o?XOu1L zeU|!j(4VLMLOy>A)>Pq3)L$lF$>$foO6@hoy{_qRI2=>#__rAIHh$v%5OME-TZ8X{ zxYxq>kiKWk`>;Qtjabn}+0Kuke@y!m@>B9N@^eKQ{{^)#$*;(-QO`G8+_%)fb2z3B zq5T8eHJJ8~h{s%(b^fdpTT zc5~=cXiwGhr$H~#{OOFDLCz!%#Ld#;W>cR-&L!ue4>)#33$*x!D8B_`w$${kpf93c zs_9OLV_Fq;7h;Y#3ktNiIP=+b{F0;ny&)maHSA$XiOgo@^i+k=KO2;2IY!YGz(c zQEDwJrf69{R$g}n;#aCT>9@jP#qz5)eJ5&b$ej_lmi8{N+i9C*8yQzD!F(yQs9_Dr zYjx)Hq@O^{{z&VXzaI7g?QXJ%Od>C(Y+1IK`mSV}%pfkS#r47O*Zd8P+o)KAdn($b zGwpPu2X>;GeGfXDEBRSX*Zy z-KPAavtgfubQRLMTKswN&)5757=Iyo5%Moax{Qo2cE4xLau7n$MqZxAS)9-GR6}^Z7-0!M|Jc?}2}>=HCbZ zewKYe(;uYv5cx3qh~o5@Q0_6L7pXrE`w3;sc0Wn|De`IZ8N@wH`#D7!|2*^;^8Soj z@No^1C-Yy1{|fC_$=9^}*QvchzKQs^Xuqw+y#xJS>hF>7I~+5xR*ODR6xWmfNBIA7 
zKIk8VCEzEF{}lG`v_FIWxw2(Dzo7ml`4#fN*7Coh{w?_(`91jq;(tW?mHtn#e@1)6 zJpV;ewomxKsTh$bul)!4C)zs#`F|;YutQPA3@)HHh%AIwq~#-c@DSz=C5LJGn<0O= z@@Me+vfg6GjwDARZnRcz4E3?(IC4BW0r3-&rqG`Rd$PkZa~t}bD+*tv!Bdf@5QF0&Lj;*8H0HxIUD8YAZ*K=MN513zKD}=!

Z>K2RvpuyP$Q=T2SY!E`k}B7Q?_vn?gL5Ibp&~&qSTI}b~Jemc`SJx>N=kG3FL|7Ns6+b zlM#1{=AX)#)5z1wGZ1&C7Izl)v!S1(>E}{E4?5?Ur+r8O}~-aO~}8QHv3uDdn@$YsNYWBLEfo2 z3v*J$%J{nxe-G_@$@|Fr$p^>>$%n{?6=gk-pq@uH|1tQFGwuoUN%AT3Y4REJS@JpZ zdGZDFMa5Yk(SMnIg?yEK4f_)2_~6&+zd^nU-3R?G&;hkSnME*?vLjI~ayB1}C zL;9WiALO6pU->w3tsw=xYX9`yN|z5xD0=50Z4Np3|hQk3V?!C%U_W=)SlZ_)f^@Ru`gg{H5B-b#HHxtiRGT%$Obl=rxndApD% z*+#}uZ#!*EQI_j~-l_Rr@DrN9j%C(E@21^DCbhT}wO(>pGL3Q>+F7!X>?b#n8_5B3 zlcH?rZm4H>>U(JVp30v4DDwAa{yyZss@s?OMcLNBep${|0I| zk~bmlX4u%`xP`{VFkG!9JfP7GKo;`OTrv3fe#ylRuC@l0T6@lfRI^lE0C^lYfwZl7A@YeF*%YP1&*dYh8(LnpX+Goc=$f%O#mm-o08j=lH7`1M3$0Ha%<8>x4#6 zqo67h^LxWSuTfmzYYud$;)(2lsL?1{)@1uec`DtTN}Jh4DrvY^gq-fM)8liaww$Rd ztB%CHVZYDgF^cO_=K5}PLsHe%+8C*eMpiT!wTdTqA`rO_?*cCL28@u|<4OSxFo$6~>b=Frei>#=uL0yf@Dq@CvwcF=(dxK7QK-{P8 z1pUt3je5{?Zlk!l$4qAv$xMHuyW4X1o6f$BQL?feH*TaljCw6|k+D2txLZA5RP1pE ze5f~XYt_2le%x`q3q`H8YTfd3<)QC-dis)yxa_?Qn(6VO^IT51FC?m?6LzzivI|2l zHwL6J*}0KJm{FBR8>&_s4QfCS+~pOQf;j-?@A<1qs8jB z#Eg`cxV=879AH#JJ6DM}9CEt@MsZbgAnxo;^&2G>ODY;#MM2C0AEt-ssJxvYLAM_( zr?{abkhWUCT(U+wZhNkYzIVBC zJ5^?u{T_?7%x^JbR=PhCw=#A;tpSXn7uATVMcZ%Jf~nvR2f_iYrsiZO(Ut0MPb51t z-B~VxW9xQ^|uD;GLi}PxAwRG&M zWyA(DS*r)Fl(S)V5DPRYW`t;^?Rat1_C)gs{4S$7+L2Brvk7|ztQoO{{y!#}SY?qO zD~&Z__ruDd&xsW#)&;Hr+v^m)Ywu7nf?^Mlt3RH?Y{TEa^+ri+x!4cNQ{^dKBbl_~ z*?cc74+fng`dHd3IyZ_cYZ^(!BzkVRuo*w8{b2nC}Jfp%)V!v5$M0&ADwTAr85Ehc` zN!q#bMzefApSaOgnJkKwCX(%anQS_NValYs`^5H(ooY=%tR>Q1hxS!0i?uY@RK|>0 zV{}6kc|CsIN>wM8nq0Ljs#;q9Ipw%) z!)Oe)aXbh_UA7}OI`J%&doMZ$TU&K+cf3!H z{sp{F{6%d!J71Y6ga;COd4tuK*|pD<`{X9mC|SM2YBOR&$xYjZP1yk~;VhQ#YPSnZ z0e6juHXdYoCs(oC>kW%}(iD$p+R`brz0FK=Q)sElbeT!9k}+kBrhYVB)VaptcA-Xh z04rHO9yKpF8Xi|zj7F`w-o)ub-F<6BI@tH{XiLh6nAvSCb}q;Gib}Dmx-b@4yrQYL zXU`Kk)jWO=mRe(6Z00?wwgeu!VmmKcGoji_L{rJm+Ju?xH0ov6-v>^3y=HPCzp7is z(i7W+xSlYvd*)V^Rl`R6FTl(pT>1Y)uqywX*Afa$ME%A=?owEEon2E!SN=` z^UZ2E&JJR`!+s&IK}VY;ZXcdw*xhG;V3f@MRty*(EbfrYiD$4Vp&T?aKVUwWCyZ99 zPQZc_4>V^yiN=j9udZo`v{Xhb%Ny$r*%g^Sqc@%EPbA|w$MtRMO7$83Dg$#{oaSV= 
zV_o7Ij0G+)nDewpv=>tk^SCD7(}v!b{ZL+BzgU&V^zvg*56eLlCkS{R@f^x+xbBcY zj2(TGmFzYL>}45UMOQopu|v3Vd2|m7*nsMOb2 zwpKP9_Kl+xQG*9_DXb2HbnN=XsUzSEqq@4zRJ+;U&QoerRjR`F?B|s@tcl~5JYm_6 z-`+9ABngN80rbwwOv381l9|#j%k0i#ep;9oT=g-PWmIcfMx;u1pr{{Pl6qXp3rJ6N zq}WHWCwb(dwJw7yF%R3F>v}QNfo3aXndx|!(U|TulZj1g#^G^Tlk7;Pdz6c%oPPl4 z=4?$elTBp%vX*#y=ECFRK#76I>3-4wqAD@v)=Vgug4k8$k^H`k?z_Rt7`1)LUHhy=+B7Vq zsxN`Z^?m0-#zD(eR}@b_G1b)-ZBJV-0UTO!!N#J+9FHr`0DoJFEIS|x@=wuw%DO3Sl#Q)hRo&BRd4)0Q)t zGD@1NqK%6q(elQIhRX65Lk#k2*%)*b9yqK`IND+kxa2lnyCETZ%yQzXTGFsWIY!Lc z?2L*7gs298J$Nrv)kw#lFJh0uZYvHlP4TYIQgNni$FoFrePelbRcU=?b4^7}q`_Xf zDDQS*s(9r?Q1kM0+U0ZmaGYygpX$M)%zE7Y)_OS;S7O=-#aOu#Z=F~teR#pa)75UM z+wE1SB{`zPvF}$j)a7#pP;E`z+9c;BPMh`Rm8%*mTa1h|Eze7Mo{ARXxg#DX>V}o4 z?Ok{*`9oN9HQ4nsR@|AfjFOs&w2g?`-(@A3uer{bk0f}1yGbu*#`2`KF{?&2!&ej4 zd94kVD_aaTutu!1d3jAa7I4(rl)G8%47q9N7LGRW+#auZnX9+@GV8Ii;(UoEi1Dqo zeT;BK9UwwzXArli)|Tz*Pjrtf=nY~uEbS0)V5RuDfiDrMv@?l)EGFN|j7Dl)7MM18 zdSC!TYWC5wcL;3HVYfVzq|!LYi3bzL+L;z->lqbk9J%{b299aWz)rE=j3?Gv*tNy6 zkFwF8N*Rl-RA<_3FI{C}Ly>!hxH4MrmM;sk9rW^jA9MxX*eqi5ZAR24_9v0V9@Q-0 zXmanoIM3q?gqRrEi19`)_8~Db=;<#>7;Z7aqVaAsEjMDPDfYZEEmph>S23dX_)3B) zEO)**?gU$$Jk^=;Y@*-H=uC)%&#;P<4ce{)2|lqMt*U$6m@Ej zYK(Z@^82uJ)O7dZ$ z(|x_EE~{U@a>zz5EXF|#O~E&wK+q|l^f^cCaDT|}K?Ms7?l(%7G?>XOZb=mLIK|tq zJilvB?z%X%h_i8hW}v4nk#eg24==DW>D$L5@lX_{v74&hDd*TNKxYK~V#Jqbdpq@o zEZ(GJElpMVWo&<%L5&z`S3sRjZ6`Nkm_%-$$BTQaOC<5s&f>F#*g30elxMhi65oo% zK};T#ZAWYk>h>zpCh>h|VNnF@vq;QakIV1GXHa!uqT}(S7C8D$jZZI@*<&p36Q}r0##k2P%oc4y zJ85G_}p{A{NBL_7#EqrBo>RaPTB>~y)o4Z5*ftF7)t#&mWh(27c7 z8jS<%@L@|FtS}SgI4e_sqTm!Q4&%}M%bPfmudI!g8?!5$OIy^V3+G7rVG^hF)v`Wu z(i2Y(%~7opy&m+V#=1l=UTQP!>#9U=weaK(iBk%?QS2L{270+c77GM-geenkH?4N7 zyN~Z{%`IIPzO^aIs7j~$dQsm*Y&-5Ud|NIHn`LEUJIi?;_*3>T|ES+Yyk1WT7nvQx z8~zehG+7q)`@>~;jMz0P*Zj9yq^7FEr$!IXYYm-g%Sv{1rxN^9(^%D9S=nGLi8j_$ zs1L1crpT)Vg7Gp>M<8xrCg=Z4YwhcxYr`QI&Ro@a6G7j}PYa{e_w#@Iu##aHmE-$7 z&#-GIVViT8;dRy}uAH;~>#Fvpak#~Y_n-@Bj~KpN#@A=s6L@>W7Z$#v*W!C+d0kBW 
z+=HGRkD3GMMP5Xi-hUqp`x3aj03HOm0BTKaN+sL4Ce^ZNTCUXmQgyGH<_Wl5W$28K zG7~>2h&$2*+~R-TmKro)z~{vd*V2YV`cDdH6AxgTIz*qRLOTQ`vHZkOyO19 z&9e5mb{Xma>-4s}R4gQX?Zd=u%%<>DjoR$g?ZsO1icw>KqnBOkE9>x~H$B*$RF~QU zZs~t?sV5v354MF9R$_;qfZa#0(D%;@egC^cd|MSSLNR<(X-lPXtdify?EN4HHH}lP z8~b*|-nb{Bzx-uBQ_a4d{a@o@cNXTKH{iwuuf~qUmzmlMn1Ab=U|EN!&0S{U$2-+G zy6|6@z;s(M$tYeh((TxHryB8+i364RDY6;O#OKW1tA+fn zq`smuii39UgB!X(~5Gx*xme#ZBSMzu4K9TAO;~cntR9hY))qHZ=FOSxNq8 zYi+`H#-naO)_usMu0#8Ou6K7x`~tY57heE-#rJ;v#rJOc@ksVwQ>>*j5^bqA@WQ^L zu^HQ$x~I0XaL~RdX=Bjw=Rg0!c#C_&9xWaUQSsJYn#CKM{K60wzYmJ{HT63VM#yxv zWB>4Z%dj5g6YTH8e_cv#V*U_D7*AU>BQ5p$XLY@a^BTSa;zLQwh~OtHd>W5s`{MXM z&wE1a+tqw9v31*Y{po?V>XYW$9&=qPy;gp3PbJS8 z^v~QxGM{^PU0YwGyM3+In~HZ84eA{9@aXz>Gi$b)8Eb8Jpx0_m^x_>dWh8pn;y**I z&6qvC_%)%Yr@gJN-O9w%iC*y|**lA^$GwOecB ZDf}pQ@6g3{X}p@J+t+3io2+w;{{xjsLht|p diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb index 88428315ee8d6d164a89a40a9e8fff42338e3e2f..caa218f02770bebc48b634c15ba74b0f9939fa5a 100644 GIT binary patch literal 30088 zcmbW62VhjywuUF6gx-4{ItqlD$)pV+5SnzPBOpwY351eh2wg?7D~bvzc2H3f6?+#I zd%<3?0xAkBV!__tfA;<+v%zrndEeXjTkBtIuU*dGXHK|2pD)ShtA4D{=Szkae7=g( zd=>F4!OAcNR)JMvHCP?ifHh$)SR2-XbzwbNA2xsuVI$ZWHi1oHGuRxqfGuGw*c$p_ z8`u`MgY97l*b#Puo#9dNXxIgIh23B(><$Ak2tzOphG9C)fSE80X2S^V0eiw;@EF(| z_JMt24(tc}!vSz090YY{%()p1hrpq57#t2qz>#niJPwYAW8hdg4vvQt;6ykH9uFtO zDR3%00ZxO{VN^%|LY?cKlQ1FDLI*M?Vd1h2p29 zKkCHKBz_h=TY2YTpX=nEhju<(sk{rYwZ~@etKdcOVoMXd1Z_23qyB51_@&rCNMEP+ zGDV-6;Pi$G=(rCUS3qo8hhS zHcPX<+tIebJK&u@UyZKVcUzkLdsP2k^!wob@B#RsrOAH??P2%`)U)($#eU4v2bTv7eLvy!=Yy3yFN+i}GL6Pm6a>wu?H`zL&*-_zHbr zm0wf-Yii%9y{`5#u{YqG@GbbZa^At-VQJR!uJrfN--jQ-4;9;q{gGu2ojI@hcB$=7 zXkIN-?-S{tV(+mu`e)KVN8jt{`_R5n{7Xmw3VlEPTJZykc#Zg8d@KDs?T28-`5qpW zptEkYAK_1yM*A7<7x*ju4gL=QP@O;3{<40}mOh`?yc$Uw)T6PJVFg%GaXs6zcxChy z=~W!Ps{CqdHPouB_DCD^7`KyZDb|MmwPn}At_$nI`j%!s4bU1YzmcOiMsFg$sakw3 
z&Cr`mYoW1MSl?q(D`Kr3&7a6gYD>JGr5V4y`gK6>2s=6P&S*zL-HW6y)~>0$o1{CP z)Qwmw?4F35F@oqJ<>slSsf|Gk%T8D8ho2!k6FUoLTbf!Cv>vc0>;;ce|K8R%`F-T~ zC6<%WYkn-fKl%VTFcD80gyx-}!HzxzeW>(djy_!e2x2-DNylN2vaF@&oHRONdvi1M z8!LUBwT(X>eS-9f*pr<2@zN(d{uFYiNrhz!e;WBKq@V8SXUIR3*jWj^)@JGFsEKCX=gK}0`+T@k+Er>7B>E*?NNkn#iyZx8 z`Ik8UYWZu(T?;RD;+HAD&halN=L+dpTD#UxCw{f`Ysk44u6N=a(5_S7_1HJSjZWT; zir?h;H_PAT__tWQc3sP)TNS?z`*zu{t8GzxOzjS*-<|UBBJXZTzeoDL==VAL{b&!s z2bK2__QOuzBhnv5-)dPq{w{gk$$NsFZSYC>l=?sI}~1qpznb1!uR0&@B{dv`s~8~ zNVd*JZO*MnuO;nP``G3f{}cJ2CVX#fGyZ4NKUX`bw%2M#-$(ol>0e@h<;3@+eGLyN z?;9unt^DtZeV@>4Yb{AXp#KPefxGY0|^k=`aIk!Yr7r-2Q42>zlegi1n1- z%h8XK-`nx~kf(h%{c;rVmx$Nl-XsqoJ`fIqdgi9zU^oO0g~Q-*)g6I7(z33er|Ekf z91X`!Q; z;PsZK-wo=w5&cHlH>=&Gz5dZ^9^`Kz@*YHcNOA44nad-Ics=d2(YC_Jl=ryW4*VzJHuxlb3O=peXRK}NJd6Gu zd>+04UsV1});9Uu<-hFsugHIu+}Ggi@C_&LP366X{x*Ck5wFLcN`9Bvd&++w`-4PY z@`q?2DZUeX7u*d$wlwqo1npC}NBus-{@k*j&Zx=Vr`Vs^U&#K_+9vju{QZvqwfqB) z|Bd`_9sfIPCw~tQ!XK2!nM?l3$^TjUFXa3x`*-Z$EbHm{CjXJJy*ZhA{iVC-Q>$bB z3Q4k)t?k7tC|IE)u}ZKqOmXt6D6cAdHAk3;M!uq|u{+rtj9BkTk_Th`COKUy{W zV|RgFrFFwjwKVhUjuucnh#i7yFbvZnX&78<7bl-k>116d&=+S_{Wgf8}@;H zVUF_q>Dd%m-(!UV);8k|ByW&(&QgWJPJD>;q2vsM!{G=gZ=~`@p&tiFJMl4!k9GWU zPhWwet z3gIj`8_t1qVG%60^y<`q9_>WcE5V)*OW^{z(9(>*2yHQ30++&Ns&|sLP5#N~r@-Zk zpX$UuCg@!E<(E)UIJIE{~GMI zmJK@NUk2AH=VrCb@#D|`O6!~YS1Eoq@oOCYTD0|WgYvG!z8>BHH(Hu{H!ANY>o zpH1Z5qS!vQ&1z4m-RktaP5$kUzlA<`z&qhx@NRgI`rV6tpQV|{{n8&mf6&n%lKwFI zBaZ$k+E&FMbM(ip-9XQz!Zx+H$a_-uQ`k?#XW+AzW**Nek27ri7v#T4-b-*hd>OuC zY4Tr1drkdbcl0;V-?VI?xm0-DiN8a92YeU4r@Z%_ybt7mNbDoH6Yhe$EzNvBM*Bql zK6Ugx@;`I@&*kq;_zmLs;|uzKsrXme`xAK;zLtIf{Tui#{LabyUU>%{{|EVuVdnW0 zeSU_&z+aX3o73-i`F{}m6aHmc(U-_;n5zG|KN-D(&sX^}`AVp$XIQZkdS#dbtH7$T z8mta$z?zocyermHtBqa<)`j(8edXMM-9UCj>_)J$v{TfYSl`rbN~{@du6PUVmavtj z$#0G3hizb6*bcUb9biY;33i4@!J}ap*cEnz-adE74!|G`!890#=`aIk!Yr5#Bd`bT z346g~U~kw5_JujHAM6hYz=3cO)bliZIT#LsL*Xzu9FBk^;V5_<91X|7v2Yw54=2Eh za1uNoPKHz9RCofMW@+X=UC$$mo(uC}K8(QvIK$HPpNUooXTjNUj_S{~w#hG&UrcNs 
zJkg1lpv{M+%3t8b7osnMi{TQuRQb!WPl6{~dgC{YuW`BZP9=UCTmesqXTUSzS@3Ll z4m=m02hWEq;RWzQxC&kbFNT-E)o_ia*|)Wt@1>4^nf!H*f4TfC=yxT&3SJGb(fV&z z-nDQ&+yJkG*ISzXQob2~BfL@lZ?d-0!# zIPrTGzt8dSC+7j_4`M&$#2=Rah~qyhe=B*9!N=hfaGP?TReMtFKPWy0pN7v^ntk!& z&(ZIB_=2VB`(h%u;&%O>q$~er@?L?j!q?#I@C~1@lI}>wH^o|LZ^?ff`yIH$((K>6 zXz#)ImH&Yg|4{x%j=xj>E^>FnkKrfqQ@96y20w>;;XX?-1rN2ftTbdu(d{0DrVJ+E0rA?D)Ts^DF#K@!!?{#Q(#xQ9o&aC2X&j*D58c zC0pD070@d}O|VjBC!Qj|3bCrNnzZWHZnRc*4fL7`&D5@~*a^hyz`D}vS=;2)M{gj# zp`$mF-`Me+$Ztw+GuRxqfGuGw*c$p_8`u`Mvo!N)ulgO-v~R}mD7zDOXD9C{w4-4c z<#omG22){o7=S?-vNYqQp@m^O%uwA-Coc;Mi49QRKI<R4!e6{Ntr&RU=?1gX+e;Mi-nLe+= z*DQ_py7J#Ze-pmt#NS4H2kub*yH5N)^!KHIfc+u-NZLikOHZ;t-E^gq!5bo9UUY<$+QoCK2-@yZnxuju%dR^=2}#fevyUJboEtdWQ} z(b=n93%xe11M9+ius&>HX~t}*{*BNZ!zNC=sp8Gho5L1Pyrtr;9A9U(vfuIB$ZzZT z?W|q7J?sEG!cMR=JWBnKwzipX7x`U@b%UusUsLTt6dUIMB%(q&&^b)E+Fm1bc|=q1eOVa5w^v zgrnebaI~eVIR!W-dD z@MfsFn7+4I8hbO^t*Un$_U&+slXr*WccR|~?}qold*OZXe)s@<5IzJShL6BUHOAxE zTj67tO@C7C3AAnS$wbVod$b{oYaj4)k~7d+>eu0sIht1b14RI=fV7H~Pnp{)zNY(f7d5 z;OB5J+y}paU&628e#>UsGjB{Y?g97>{1$$valgktXle3)kp3gOo|(~phQBy@zoPx7 z{NJ(vaN>VT|106A`1H(@te=wX=oQc^!b-}kjIB=bb);00UKPC>tPX3yny?nEZQ1O6 zORrW+U1IfMeQ6DxctiP(h&6^yU{fcrnev+Ze9bsJ9#dM9*UHIjE!~ga#?jkKZ|C^! 
z$>|`yqoa3{-SmL_k6^pWVJe7@#$tnD#n zH2N4g7LJ4CMZIrQCir~SW{DF;%{gU~&sX&ta*mfj8G8zx3QvI3l=HUQbhVq*qH1Rm z%axsnoeyJ{X8r|=&p@9E3!&!Z<(RzL%42OQbJ2=mF`Nfage7o3EQJd!P2Gj6y9j-; zqc4%Z6nz;y37+iaoua(u=%>Qd;0ky;JOiF-+5A3BGmo=X=N!jBSN?g9&)rU0Dg6Sg zr(6hE!Hb;yiV-mOK%)g=Vr#(WNF5~1#L6D72XDK*L=2M-(hL`-6{Pp^t&DX9<+NEXa7^~ zM|%K1=;S}7yob>rfsewi@GdCJFPRdPOopGx235f0F*Q=9@(97x*juP4VBgXF7)|e^{Dv{v`gF`uY;Omscg(#;R0+ z6=5Z)JLuJ@l45E4sE9|SS5s`6T6ML4YBkVn%I=I^3)Yra2fHq;2kXNIupw*&8^b2B zsim1iGu3VG_$}nObo^HGTRXmAejCScYwarSV0*F60S6J|Mi*~*Kc_kcZNFL;clsoNW^kNWj>^c-uq98J8x z;seyC;}3*`r0Fb}T7%&bI1~uybymj28@7k8c?uGYBzdw;z2A_ma!KYR8 z6SZgXpOyVK_H*!gX)j>E2w#HR;mhz9<-V@=s-E3fXs=m%ew8=S-h^)@V&>VrgT4d4 zYiVNdssH=vAHWacM{p3RSJ9R^4i?wfA20VkTZwv09E_8@&#!t9U)^`mh0P zs8|cNMn0eaXtc)in>c<`^k&kVCv-2rtWPV&PE~8IHb%`)pEj_qrI~L##oMEIfE{5c z*cl!L<8$kR-&Hkws&%uzshcXlJMjPvI`NR=Y3N}`PnVwI_?hyv96y^rI=fYSSlhqU ziT5IY4D7AEK2E%^{2XHaV1H>tu?N6`(gtZyU&4<+-@*7pEWLQuVQ9l0ZG`lZ=%XC{ zI9s#o=tOO^Ut`G~2gkz+sxuLL5n+1wW#&Yc)7&#q~|+&481`5 z3`d_SztHh#kvCiV9PGKU$jK}A`P%4Adt;h$PgH&h_Iy|h7g(CSh00rmz8Efn?(<%T zc9QZ>#y$luxAf|@(VVKDhQ0!xo`_dH1MN)ZorQfiJO`c&&x7Z~mGA;fGye6zl z#qbiiTH~#;w&}MP{Zi?dVXuRiJ9$?qekJ-&$%hBJK{to(%gl^XGp7o9WzVbdG??XrbNcv9nU2r%27=8jjg?r#< z@N-Ku&R*5qhyDfp(useCwjX}2`~yz>8~NX={iybx_Ay`l-qO@RDF26q@5N2dPw;2O zf3fw9{;T}o627VNhcn)vivMN(YCf2hh*wKStDt{dt%zL-R(A4I6t99_)zMY7T6M>- zK~7ESwXkc$I!<0)G@XxX^|2ekhE851w8pRrYzmvf=CB2930tXtYiz%zw@205sI^V_ zZFSG8wO1Rg)`9$viFmb6(mSIc<>*JFb%9-#*9|+>$?GmXfF6V)m5H8M`@#N}CVv3hKsX3$@4R@s_%j=#dPA*d`VAv* zI2-{-DsL3_ad0#o1INN~a6Fs;$nb>E+vn|d1&p|sEo(Io|E7ku3>ocl;~lUrF9omhGOxzXo1wY5K26+W@aq|Ld`D zaPl@vzY+Z=cr)AtZ-JXFP5oQd?>5K3UH%rwzeE0=^t%h*4ex>XTAF(I`F!mgp+5j0 zbm9-G-ot9U)gE#5N9AvI{Kx3~IDA6!ZP-sj_RWm@wBpa8KMS9e_7?W@@CEpyVlQc~ zx--?bTbepA%YOy^Rrnfw9ll{{^53-eOr5u>{f?vUP`!82--GWv@edUL(D6TFjGfYV zC3G{^$MpXMerjoAdzAkf`sZ*j+~?$df%c{Hzrx-RzlI0kH}G5d9sJ(XjC)XZen9`x z(SMTuv*Z6l&aYY@=fd>=U3q^vd4H1km*_)Jg30h$v4UDf{7SGgOo3HkRp|Arrl!K) z*wt&mny?nE4eP+V(CbqVyFP3H8$!;0^~U&3U{kRfc5}-P@%gnxZw0+^TC4f-+cdN^?f`8&T#= 
zD~isI4H}sjixw3|2jvwnC@L*klGAtGFiaC4+;_qJ(vs*LWfa9q7ZsPx9v4gv2U7Ka z-DHGQL+R;W+v88l88FUYYv6zdCB^e%{th{V2lxYn2A9OLmscC7cnHlO= zX40?k7=Nw)ediZO)ob`9f8f~R(K*p2oftOsjnoV_7tb7}!=Hv7B2y(pIt}ral=}lzD(|sR@wUrL&j_b-OzE6#L>J?S zy_Lj|Z_eNmdMOMp%J(j+o@82Tnl>Qly?x3YT^jS=D(jP@i!~%CS~O!$G(R?fCLJ@C zSJu%uf9u%K8?4{V!Xgbe$*ZroNOp*^JUjl1EE{UfAb()+n8KnN(RsxsdIhOh*n691 z=#F{2UFMW`XS5y@*ToHmy+h-8+Jl_F1N9#6Tfpn4{}8=V`_C%OEnZMsXhzb_(TU44 z^NZWTklFJ1JGH;}@*Wf`nHw!yV&6{M^^A~q-CMl3^N0A}&d0kB9IUPb3ybV~-+Qqs zHA~Ofyx6jB*Mxt_L(>BotcO-y6fyu_dvA0d%#b<-GrJw? zYRZS-vdO`4T53oSLO)hIVhQKaBi3<~KhW`Z?_h z4ZS6l9(OViOV1fHO6PD$bY4`ygcE}L(bUginwe0-E@$$3MwxoFID zAM^*>|KhD6E96bddq0G91{0onKJnAgroLYDO}ta*Ro9Er{3evyhc356)F0?jV75P7 z&)qwkTB7&E>bPb$@$Jv)r=R74{h~$DeDj0sJ%Lb2w@q(H?>i&z#dqqEew}^0G%@dW zo2k+4hl#bzE_nP*j~=ICMlYCePUU3pWvvGn(oc!5g6o-A5S{!x{Q>`{-fFyzV1|B0 zycO9FarcnT${96G!;C8C=9qpVZ_jkm=~w0+a$CKAos)E~ys^A<7%+z`BdE!_Zh6nX zfwF^vwE!hy{ENHD)ZeUA`us+j#Z`#80UP0tHNf|2w5*VTeB;W*Og*F{?pyq*l)Wfyy{znTek3mti)88K z*a>Dw9P!`RYpMCgBH_%8NH7-8h-3!h^~yxe@1M)9)_ZK7w#}QK?U^15Yq=cd!^9)j z``a<`rdOacx?!0GWjEsxt(*rMf2w~g*DRC~NYnkw4nzt9nURogf44)Wa9ZMdCM@sC za^B+acbzv|D;m(QrA0!yxrt(qR?hTPEk3iH;S==&93ER-m{+Xrs~^hJt0_+l&GU-k zgo25g9AcAvMFT+^U9VdJ2x?Wd6sh+{0sHESf)CyLQy?99e3?{Fyy_#&9yvSEcXTC)r)_Z zjrBh0#x5wC9rHG zgzT!ldX5;2{==fn-nh12UMLjF$Wx7IdS1TwFuZyRF+;EU^6x)esRmrPh8WL`j^$>=Z2oTjLx}9i2v$19(r!*oRJxG*6WJ@a)!=&V_I*%u7v#Bi&38pO95p{f_W7pSj}0UK%k?Ini0TC+ zv8;UO1e$shvqS&1V)KGHe1^H+Ze`?Wn9uI`3=`u2p5c)y2K8=8Ghcj8Ma2J5G0(dY z1-be`a2|6){NEMJZi!#FM4$8C+dnO&ua?ZrT(n!NGpiMyl;H7{!qkK%m4B6e((7Q)0`ar zv+qlLsZT9pJqKM#~&EMzef+%KS|{m&(*}6XrHya zS(+}_+<5=|ffE1q?hyY9_O>89k{iuS*I!|2F}=(~<~yt$^$7dR_kiA>g7f<^luuG%mVh$@yn@W=IVa=#Q!!_DIG`sZ5@c@ zhSD<4aXdsQ|IBROeqeZX@$kZ;{M5YSx%(#P>fbv{mP{)s*1r;#PMaH@Ra`P{VXS0+ zVR6xhKVn^#geuT&%L&WRQML;4U^iO;L@U|mdofX|4{ iPpYV{LyC&?V|I*f)sG!o63Z(t$)7gAaA|CV|Nj8O1k}?2 literal 32292 zcmbW62Yi%8_qLY+fe?D{EWIb$&896NB-GH0NEcHU2qnP~s@TN>_J(2u3#eePSWvNm 
z3id7*R1{Feh6USqKQq^62MqfC-}m~Yy;yZ*F0d=?2D`%^uqRA{$uI>5U=XIl5KM#VFau`7EEtBpU~kw59tZov zelQ#6!2WOm90&)&!Egx7g+t*mI2?|EBjG4G8jgWu;W(%>Y0l+%cmkXNC&Ec^GMoaZ zLY-w(=R`Oio&@uB^xxEZ&sW>4R-ks7S|Ra>>{GF4$exKk%hJ>?LYuAl97msvJ`WZv zUgE^(qn|9j6ng|$cCOS?2?o4H&r?FsU(fLBVpiaebM z-_>fX)z%ZgM)tK%d=2__(yzz9!HM4}{U*m>OU^p!H)G!dZ*}r+vv#fK#5Ta&;T`Z! zxDjqr-Mg%9)_6DiJ@8(5pYrZ^@*Y5cQ2IlT{;>Q87JMqVSzFN8`zQ--im`}>r zej9s>?5D7whR-uBHcJe+!{}k?qpDFKi>@VOR_$Bozc1|zbke(C*B>c2kfc5B((s3GEA|oJ=5p&TBV>`YD}vX zlCQO!xu(Z-<7Zmm*jdU8JANp@z;{GPWsJ`evABDiQVSt>!ojS{M*U7L;9WA8{sA=?=Hpf zM!yH%3-5#XTYB@XqqA7)LH#a$SbRu~&eQWBk>65$RPC_ZX0?54k5T7w+3%}8f&V1j zB5eouQ?j4Ne#X-5&sOQrqCW?pcjCHJm9{zlc5+^n{*t4=Z0$PQ>qyG~hId8(Z6n`809r&)LneThrE4Tj#%Ky-b@1oB~(m!_ePvn2<_`BtQM(*eE3%Cb< z3BOYAUTfE_>+^Zdn~Ryxe#O4V{sta^-@=3NJ4@5=d$b?a|3~bf;2|gPXT^U({}uig zi`VUk_Pg@_!2T2drE^mo`)}DkYkT9zRkC)R#JI|iUPXRYV)2e%4Xrv%fHjn#=;YOu zU(5P+OC7zAboMQ-o}<^7-oW~C4PhhL*vV_6yr$^Q9KE^p7S^x3o_H&16VzI(C9AbT z_rtc9W*^!q-rn&$$nWU*o#@jUc2T^m6YqxJU3w2k?X(HbwlwSLCB3&=Ut)dWahCP;{NnndWjk7q&*#NVjR9~V9As%?gVBbl zPA>LPCq4{qI2@t8k!oYqMrjOhexohDdd441?l?=MMPuWYe?m+*bC@WtSZxyhCc`O~ zCTFVR(;WXq`O_W$B>8!cpKtBB0_la09+5vot%&$cILor0=Ha!t*=loQTHIXu^J2c4 zQ;D=Q=s#cf$=L4wSRlR3>ZZ>^au&hGiZ8)l3YS@${8JQP?)ayYa~eF|vff(dor!i9 zJX_jjYUkjufahA8`sbmY4=+&uO6*neLU@s->31>OCGb-1+gsR|J9$^A-j$AjmHey8 zTkYuApk1r@8b`kl{d(y)VBhG(Z?bm%%8IWeezOz51?^VFZ^K^i#5YL4-SO`r=T5j0 zZi06y|88rWeYgkxUU;A4_hUccJ{HT1dmQZv_@wf;IPs^@ zpO*fNqi>b}tm8jN-t+JU#kXN^hcCjH;LFN+6?+HVDeV=XufFbsw|J&yk+IbXrOitlsc`_aFa{tfm4_^q@bun)rT;P;mGb>6%d_oLcR#16$`-Wtt1 ze}TWk-;{sY+9v+H{6C2O>F9q+|J(XH`leRpN-!>#S6RWzRnV(Cdc5>%=+$8Ytf5#< z>_p4@dInyL&bxAL<!w3*;A)7jg6%(r2R2a`YnUvmJj9Ide7lo3V?Pvktoi&X;yFcB$-@*b878)cVal z7s17F30w-7!BgOJcq%*%p02UZu(qjtCi+?MY^rwX|9>yS!c<^6J8R%Byc}GhPGqhER9c z=#60$*c3KX|K`>QyaQsa9S!%t=4_h``DXn+R_Qo*d90&Wt zelQ#6!2aq#z}lwYK>34+4R-V)Xt|0HwRWS;Xv3Yn5sHsQ9|cFlF;3oCv~kL3&Sw1a z=+WmfLGg)bli*}alRriAsWHEi?pBo($(!!;Ro8j1a+1$i^>Q%}Js%dpLKuNF;7m9R z7Fn8cXRF>E^tr09Ggf6DezEMI)JmNE`SMRz^PX2Jc?+C)8QMa)NO_B~m%ycP89c?( 
z%xAgsPj&p$NXUIR3erGxQ+0xHJU*YKIqMfJs`Pdi4;*Hy)ty26#?2BUYDi=$? z#PKhce;Ijt7G};@z$@WZ@M^dkUIVX%Yv6V8dUyl85#9vX!gcUwcniE0-ezg$wqEny zfPOo?1KtTY!cFilcsINU-V5)8_rnL^gYY5vFnk0)3ZwV!G5p8j6Yxp61wI9zhR?vQ z@LBjAd>+04x54f3MfehY8Sa2P;VbY}_!@j2z5(BaZ^5_WJMdlj9(*5u06&Dg;79Od z_zCzmgaoy*ZKGw{ToL=ApKj%KPdk@$Nygb4{ASJt?^u+ z&ud;yQ}>W!KRf;}@_%*w-{c=A_jmXQ{1g5K|F-n%R`sb>a{M^?l^wr|d?lIOcvuZq zhY7F-OoTOIEm+&q)T)D4SM}>**LUI#q&IZ@M&vYxO<+^l3^rH47T7H<&3LV(w|4wC z)^4o3SG6tq?VNaf=^fN&sdZEvMNTK!Sy~tDu1>#hXx$a>f!z}(!DN^M1270vVF;$d zbeLgj)|IJovK&85PA}=bvHQT|U|-nJ($vXD%YpsXe*pGCI0z1gLtri(sy@T4-9%@h z>Tt&%;qw`7q@_2e8F#eu#$b=NH2OH{U*!0U z>9Yhbh0Bz83ifhKv#wL6pN4)qJOiEy&w^)Lnttb~-wMY+mz?w9`S1d`Qu(W}FSImu zE|Pw+<6k2GQpdl{+D$e|zryFMrZZmkN~=}93SJFY!)xHRK3}}Gle!Zm` z{|4zdqTdA9N_!4_o$Q;jZ-KYM+u(Y*0p1SpfOo=;P-`(`-v#fsG}=9A_rm+&{qOeW@kzJ^J_VnK&%mwlSxc{u*Wc889=@P@+pxDg z3rA6{#o^Zv9{T-U*-Qs?69N%F8vSmKjB~SZ_9X|ZG0vCI9M50fmNY8cw@#_gVkYz zrHR!*ON2FHE%mF7T?f|H`O;mAuV-nVhvzpS-VipjH1!)R-UMBT(`e0LbJ#-lv`6tR zVJp}gwt;@w7Pf=!VF%a|c7mN@7uXecvov$+&RlwE9zBUC#o}guDT?V##|Lb#=@*or zs&*3nL$cGb(_se8gjp~Qds&)#z0vxp&T-g%op?XAY{hf1`@;cnARJ_A>JCO5qJFuK zK2-iNV#DDGI1-M6qv06!nSec3_Bfxfnd-;utQn5CG~=FN^E}mYV_5z*I-{G z`&!L;pY=UP?fB?AZXowYOH=PA^;?U+4&Lm-x<@J>kh_m ziuv(($-kT2d*HqBJ}2*fv?V{0Sa{Kf_-vP5och|2Oo*@OLNv z2il*?`^(Y)wswnj?MWqTSBry{V|mr8pjCzO%GV}(V|%Syb(kQ#26iH>DXkrLEm#}Y zfpuX$<<`e;02{(aurX|6X~u7g)(kdRofg2zR zyHKq+ejj+8rI|-xw0_FV#?FELoxA~x4@4gX2g4ywUasbIh3~=l;RoiW{|3&_gI6aEGN)_()_#qwI}8C8$7e)Y<-b+)Tl!LJJAVKt}% z-khr^SQ@*A{6w|W)oK#21#3&2q*g~Qq*fQbp3}cRS_9Y+HiC`SuL*WjOEYdW>CMqw zz?M$D6Fv=wIC@9vozOeOF0iYU*G+ld(R;w2FbO8Z6c~U(m}=RI zc~wudzFB9w>SvIb>F8N#Va0nndT;rCh#d#}!hSFt=D_}N02~Mh!NHbhK0{PL7k#Ls z50gF|eFPi{N5Ro>3><4|>W))?oh@UJhbK7k31|~7Tg_JfWV9)8s->whP4N>Qe>yoQ z!919+yaMb(OVd9heFpkWN1r9V2wih9xpUxLI1d)X5;z~8Y-#$IqAgI}GDlx1f05%a zmcNADrEnQM1uloD!qY5UJ%)bO|0;5>mcAPM8h9;S1FwVE`+TjdD*py}BfJT&h3nwW@D_M0 zybZ308!WwbnfiAy-koqG+yw6uLz>6kK3|Rd#e2~2_4yLMlz*Sv`)Z1ry?OvX2p@tE 
z!$;twa5IcP*T=1IYCWO4PZHk(pMp=rXW&-&tflGyocceH{sP;w2A+yy_fG&vumeWGzc#oi4+ zgP&WP{4bQZ2VKv~XkR(;y^8PCbJSg~zF(|K&e!l8cmRH@yo1=^!SCS@@JIL)JOqD+ zzrbHD&76KiI}Cr&tF{-4Hf{jbRhm6gGp+VGGz2wz4#H zY_0lj9N#a$EqU!6y}k4f=pA7vY2DR2Yi)W430*8r-LCSx#e6Sra(YPbY5N;LNq%z7 z_xhUt0qH^PR2YJ3Fdb&VOqd13mZn}WwBE1}JWk{E#qI~Qr47Z-f&HZo@cG&d#vcd= zNtfp72}59REYIk}$Q=$xSen>K^&5pg+R?{QYiz7mLUeD&+dQ+b6X-huPE@@~*puND zI2BHVC&KCQB$#Ju>gTIo0eYdMN6=;{KGV@>p%+P??dWsl&m}g`(TmYaEZeMB-pOj) z&`aS0Sfi9Js=XpR4$J zj(@)V3mku?{8f&Bq5O-ipK!6GUt;YxUlF@Zd6#2f0k4Et!K9hQFG#e|J9+Z)r2dl$T0@q4iE zh4;bx;REnN_z-*;J^~+wn=Q>akD)ydpU`+uVsC*@!KW=vzh{)U)$yMt=Q;Si;xEMF zem&!a?ZjS$FTs~%c?mnvb}IiB>{sDy@OAhGd=tI}-?lXKd`ETOb^Q0_zfax=j{YIq zF2z4`^pDX$fuF+N@H6#ApL9nZ{Pv=Ej$Rn zgWp@4_5Gm!KRW(T@(+>sGyDbq3V(x#;qUMd_$T}e{%u*q2P?riSQ%DM+DKG$oFcpSi8cc^7FcW6MFzf|;!#?mh*cbML*)Rw8 zhXde1OY>|8>A4R^9|CjXP&f<@ha=!fI0}x2W8hdg4(gnlF~`FbEIqAlbPp$LK9h(~ zhErmBHKr;)4gEwo9i9a9U_LB>g)jnVsMa~yGvO?0Mb|_`xHylf4THi9se}>r#t=`^f?or1<$r@8{L}~=;x~c zdDb@d&qu#N`bz9o@IrVIyck{rFNK#`ntGS3&J~V-B{^3~zZ!cryarxtY5K2G-gW5L zJNgZ1H!6M;_F5;tPG{;N^jqMq@HV&}Zm=}t-Hvt#yi@fzVsC-pLF~!zl8m=>^*8boc=rIze4;~M}JNF>*#O5H{o0G zZTJp+7rqDIw>0zo0PRE7-R0;X$^Y2#Kau~b5XVO2%{=%}|DEu!i%{X77?N$Ch zN8d01YhvHP1MpjT5Pk>0hd;m{;ZN`o{2BfNe}%un!eoa^l)`oRpU04s+hYc*f`FVBBJR8Br zkb9BXRO2^8YYtn$mar9U4ckCJYzy1L_OJugT{Ls(1Uthnuq*5ayTcx^r{)+`OY-^J zA15Zm6d17d#_-}MCpD%errA1X4(aNjp{8e_n5lN9T9(>kwXoU=YP}e@H|zsd+tlc5 zY3zQ=&vyJA`TZS#0DT6+L5dH?9s+aWP&mxe)E}<=5$GcwU3V&RwBwHBKSw8RGK&%WdbYhEQxn|x=$XTj>%dBnmQ_z>gQ{idKJ01HB zcqTjxo(<1|E8w~CJWDgq`DhoY?n-Q)Q{shA-bKVNR^BDpmpbvwq+gDHg=Gib6EntD z@M^ePx%XpV1Fw~~2KzdAy``yfgW@+j{!Q}NlDE#$ZG+l}@a{9N_Fz}^GD zgkQnEa39*>R3v8M}&D)zRaztBKXI6JQOP2y4Px@OZJdS{=u)i(OBw zkKMqDH*S?k(Um={0!_&F-vWrS{SVt z^zwRR_kqWWeX;w&Z0P0ZsP)&lW5of`n}_Fn@j+^Xu|0nXcCI)Sdl(!Jy}S|FBgIkJ zqv06nPWGUY<<9itym^s9*)vLu3i1XQlq@VR zD_xp1IeR#!2@fAvvaoEHpQPeQ+2WGYIa5MO!C)XM5K2u7q-XYwdBL=_J^o5LBeVUr zM~o~f^*YXrc;)>aa>kGICyy9kTvQe*^p7jcD~l}f=S~TzLP}avAT1>+rDwU5nna<2 zH;n{k8oI{T=)({K@{6{@h7v 
zLCrLj6bNXjs2!cBf3uReXqwBqyb(C51AR0vX=kMD6k+kDA{!UwTGTKpPPVrFgC95pJlW6=i6C zDM@L1j+vpJfZ>QU~bnhlHg7@Xrz9?5ea zR#ZH*uw1v9;TDbbz-2Kti+4~~@1%PZDsQ!{lgQZ>8OP?DZdxhFN4 z;!M%MO;2ct&O(;*0-9P9B<)!sGuZP8H`VKLNXOx@zeC&0{mH|}fV02Ho~N67lMIcQl{;I7c|5D<0zO8I;26J*XR(qp$dE0 zS?P&-mr*Cc4pQzPy(n*1Q_LV4SxKp>N#z}^XBJWMq_R?z^#4G*F1xphQP+D&?yh+Q z*sThtWNI1Os(_tTxu0eZowL)XPbIIcw@-l-Zx}O5xtmteI_%>N2K6G*t3WrwRIqL^ z_1L==)LBuwntdeYepW>jGrJYkZPu|#V_JB}-Ysw4ncfSMrAJ+N8975o>w*r=o1dpw zj<=RTlHR33?^cJriN##AVeKpVYqyWrn<{&}-iX-?7nGIe&GlY3nxO8Z_Bq2lGwN9G z1-+Mtj&kj`tJN=OtUq~BPHEn&GJ=|6 zdb(aECO_(#eTcpavh}JToIN++Tv+weOU>K8fZje@mE*Y=CTHAWEoz+JIeGI-N+Y_} z0q;WVr4z`~7Gm4aL-YmLw#c8{R$DhIpszt7l%zj1lpAx77~uE`TK(~PMT@=No)SpQ z(5zCD^g{CvLCmv9Ai9>}+UlXh7cPm+%P(12I@6qN?{%BzeRK4}3Iuuvj`Y3TK=0_= zZE_$(Pb8#+9n>q-ae_ze+aT}p3@XhlF3|FIgS-RkeYNzF3zV3-bm_ZJhQRR29}Sc*?C_aZMb(iva(Dc z$9MX4-tSNDe9(KsS!qe>8pB(bPKDzgvFl^?;T}A;MBgp@O4mnbCS$H(DV5zt-A+h(7n@ z78jK*vp*a))h4!Zz7`ysQC<_bg2|a7j$V3~xM}694iy%rg!5(u^+;v}%U6ArT0vp2 zCif_3$S_^OAtl8#{lmOJa;LS?+a)EOnW;ZgBPqeKK26?ST`{$y#h1^P%ANM=oE=cX z7N&1jK}6FG=rK_&CR8xP@?u9noYpFq7S7kB4(nT^KOz)7Qp~7mhTgLOrCv~vCmb** zyL=4&jibU-T|U3)Hs+4>ezF!9nuG04GAkTPNlyW&1wy)>&X7gNKj!DHW-qu+7f z4rYZjv(iH0^n&!%Xswu_S`|*ydC<|1p)O2$vAj?~H_zKkW@jo|HUEFASP;s{*TRpg zXw;1VWsZ5;Jk2pH?}&=1n&N-0I;X$)T>8(lUzhTwt3X~LFI;fsM5Ai@vCCF;d?Ml0 z)Pk`7#-t~mAD!nB;<4);{iMB<6VXdH-Mf1k;neh4JxBb{dS=;O;#cYR#VV%HsGy=1 z|79biKPYm&zbECEM(kf*rnL;F>t!2A$Wk{3p9&Gwiv%`y7p zvyp%y1->8S++pg#V9qMqd&CDAd|?4C+jW^|ZY&s=0q&HAnpa zuh-1f3#COu;mnlOwCK!Y!ZAzz<89bVrPB2+E(`~CH=?ipnD!qp!{}oel&iBcsHm8; z(md#0Uu||UB{dk1*ySD}9eak9EdN2(TD1bP%q!i zC`(@#rygQz=zpl^Ep$d8-+Z5<^UF74F!*0TA1auizMpy#=7%Eg{G#H~mn?etqQByD zykAZBr$clHQ^OhF2QQSCWT>KyAn3=9~6DpixKJ?K!#>9WG82y{kG@p)KFsxs4dc69Ldd$(aw8oniZ#o(=x)LeBN_MN)@m7A5X(OC3FnT@P5+=Q@Kz_s-d)F-O;0e)H@YZ z^9yw8GxcGNJ>ax(Os#m1(Z7H=ze~&K7)lRj=?7Ja*9dcrsmG{z^i%fUbkV+9`5EE# zqZS>?%FmRTf2=hhU9Xi>&->d z>C)*lO7yS&Wz*;7%`PdOz9>?Ag2nm;}IU$PeD&6__rk~D8#Vg9hf$by2>qWNVS zX?ISf_&?Hzs!DWT@wxgHHFII!%*cXDRn&E8aY&ki EKQa5Jod5s; diff 
--git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb deleted file mode 100644 index ed738bdde145082d329a40b878618bc5f4595932..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 33552 zcmb`N2Y6IP_r^CNfrQ?BS$Zg0wl_r$q!(HOh=_5MY?6g!7dHt_MX~qZE1-yq1;wt| z0L5Ohi(&;E_Fn$)%)FaD@Hd~|_dMb={?2*Nxik09otZoLZY&l{ipA3LH;cuRO186D z(){q#=%#30dFNeQE_($`&W1t@^^p(_) zvsgNDp6Yg2A@2l{cOv|ggnu%RI|cfwLO%`u>B2vQd1pdDi}u+fehzf>wQln~p`TCf z0`fxgBJyJL5*&Xi$`$l4BQH1Xv>LH1sa++st8u-E>2|M0&I>5;E%#Ad1ADEpbCgg{(<~IMf@*gr$AQqroiZOP%@sEQa%*B); zC>X;jLs2jnQ-+~jVB#rO*uzoEQAVHyQARR0O4K)++8D&g(#{g`an!~mK7sZ`GF#-? z5T8WdE_4SqC+R|-Tf{x^y^Q%tKN+xCy0UK)OgRAg2a4k-!{_}?Iauh2K+h5W6xKNv zdals(;O8?|AoN1$MZzzJUn2Zc9#ck6Lwq{QOq3Zo??pyeOqrFm^_Z-Hwh(1DkDWu# zHPktWQk#do!-PH`{^7!30RIT)Rto({Y9Yj{gkDX(h72QKE8=zV>xCb|wd`Q7x|U;6 z8t6BYQ8H$z&wnwsqsTapYogsuwvbE6rH1F%OpJAxa>rCipQ9oPg=TJMBJdZrzV(G@~)W=^){UY*W z)OQK(OAU42Wz;T5{0gC8N&PD5SJS>m#IJ?En)(`{ucdY!;@1m(9rYW?_2iA@P2|lu zZiBJ)`Q2jdZm&bXjXAfA_(p1XAbuz9O(K35wYw3&N9gxbzmL2h@ds!>DDobnwi)q< zX+I+3k3xS;_>VJZ3-qnV?#}t1@&xwDW1&CEob9lGM0rXa|1|t(P)$(+AXhb5_}roe}w>PkhMirP*i)5-Q^2jpeY&NS5d z9iexk-dX5fES4TI;det`cj5QoF+Is%i1()5hwN*p+v$gRf9eB-J`nmK>VwH4B0iMb zFyvWj4<|>EBgs+ZXhYrZ7+eSDfwr?y$9$A=^vA=VN_zr15n8sf^)WW;lSn(_4iR_4 zcQNJ`x(B*f_&)f4<^_Zvq;>$}2MT>M{DT-fm^?(pbEr+RSbE|*)Vb>O&Bbwf!q10a zApAo3MZz!UaV2Ca;$^g_k<$%z{WGY|BxfPNT*NEj&t`0n(C0!wl=?jKuw=aFmDCPL z-U5^*^pAjDN&84LWT?-l3h`>{HDp-CYZ0%bUN7_r^o7(H3B7?@BN;^==RxXXk#`jO z9dkc5j?!dw?Ki`3N&5O6mx|+-A->%BsVjtjG__;MW0AK~#E+wXJoHsUKLP%U!as?5 zCzGcjek$$LMBeGp&!B!Ld6r=>bx+T>STxTu)YoyI#nNsMV&|jWz+)~TFNAgx?TZcd zF_%!g6!FW1emV6kpeOGARU&>h^lOBFE%R5CYsj_8yN>qtu-6&8*IhVo6nzfsq5p(( zBgz((n~bhwH^bk+yjz5RE4ACm+sTc{zk~Lj!?&n?P z2Z(=2`y=vWL!I{t;-6CAO=1kU*7F(Sp9}vB_+K*bE1~bD_BHtp`7QFlqy0VXAB^2w zwfkez*5~pw=8w9jU%)8%D|iryYiidW{GIjuLH=o|kNwNo?JUSmN$TxVsUx8B+NF`{ zWP7p$Vj0HPd71D#GS-RYIBM5L|V5cC-d6%q1Kn|hrIqG 
zJ^=ne#s&#}Fts7%P~;7xZ54ULsf|E&OH0=8s{TR8JJc^7P>V9mZ){J^vXfGj`io9jimXj-xe>Cl5$YTw4y(?CR@8}?KEc`C|k>Zg&XLpy`^nTGoKv#6bo_&Gv9m->0o&!>HXh+jzU zBE&Bi`X%r$W$ZHYa`FoDO7bd0eI8fixNC%et+D&v0eub1eLQ9@c^!E@xsJR6x$BLs z>%EcsP2|mpZ=ijP$h#H#ZPafU`bKK%Jnj_wChB)VzuRKz-48JcKA;T|7qqu1N~W{KL`JL z#$FKmi?|j!K3_)OE6jV9+(EuZzD~YjsL$g~9QT&+-)7D`au<&Oz}ULJ z52=3y{bQkjg7f_X+NUVHjjnusOk#e2j-Sp4QB%2J!tVfnCFJQU2iVx z;W?(|p}u_h=)1H66wJM}Lgp2b#bgO$QQD=j%VPL{3Lpgdb*3E%Z8}*He#>3lU$0(n!C- za3K0l6)N|Ipn$IdF1)z1>}X~MdZcgB_!s&KG(|(wS76x z^9t%$l2?&elh=^flB>xz}qgypg<#yqVlU-a_6=-bUU|ZY1v@?<6;o zcae9K_mKCJ_mTIL50DR%50RV6hsj6CN6E*?$H^_^R&pEp1o-P3FHvzKy&;X}?3h3vDOu_YC#1??c~3 z{R8qt5&sDBkA?pU{7;3y8~z?1_Zj&)`Gv^)5_w-y-z)U5seOa^x3s?_zbAh%)aUyn z@_w>d26YC1CVwG+C4VD-C;u>1$E)LXJ%3>>w0Lx#SH`*_ue;EDQ11!77s>#X-Z2SNQ#?_cv5|s-zD@8I;u0 z2g4sC{Gp-_tI<_|qz^|KVf^%wi;&kqyOE5NF+*Md zV&okq{5bq3;Wt|>Lx%CVC5WAYvXuTZayhxeP}h4j@{XZ?EV+_Ajy#@RWvGukf!c}W zNvP*!+NX%TQ>mRso^CkgM&zF<^3G!XZ0P3*{apB%ue#0ig?<6_3x$6X{EL})37Ht5 zmx;W~k#_~-R|@?qYFAqo=jE zn`v(_)cP&dZbkeyq2Eq@BlJ6rJv4FrCdTd}?Z9;zn`jf)n4*x0UJxxACJ}dH`L*Dbke}OqKk}o0tGVNDH z-mB1eP=AelUBus@_NK)$%t`xg@*VPBLw&tFsl7+OkNS6s_y_Pm6#hrd`* zCijq^8R~XEN8T64ABKCZwXZCe4wzr*d%+RZzJ~ve#nS#y`rpF;j`sKD59E*JPvpNbAG@xQ_EPycuLf6!(dI`1!|wYMNQg?6f;innivk|z9g=HP(#9cX8enPf+@ z6WN*ULUuLO^>;%(-KqBwdQWP-$lhchXzl!1&7GSux1hCYP)P;!`v zTM-{FeBPJ#Bbhr2@zEka2L4#avdD2FJ|6K2!k-8~TlhBklX#q+bdVgs`n+A#+=kY3 zah#W$kMu)}paf86q6Ceuk3RtZfy|ps9z-52@(w{>4)rPIR5DlOY{>Ck5wdwAkr&tj~ctRQC_>f`57n@b*wdgh7vVescOb~w2}#E+m> ziM%6eheW&zdbRLt;D?!4OV*L~$-Lp}{w$=vD5nSCYqxyyK~@vRFo>(>@W$Y(+VV{>kJiR*op&+vE}?!Yc^P>*Vrx*Y5cyX! 
zb`|uig?N95g$y!)u%FZ2hfJxD%8ZbtsYv>!3l?L7+pF_bMRk6SDw zIW`p&cKdBS?g`|4fbt~$?V_Hipg&Fh8S+{3Ir4e(1@cAmCGutR74lU>eV#k0y@qyP z7y29U-xU5^@ZVS;nxhu&WJI9TUrkeP^gq}_?^Om-o=8tUV^QR`0jAbXO%$lhch)ZdqO zKSNzlf9L~*KM?*P;SYvCgvSjfhmlrtxS^_V6vj`7ktm0ujADE=IffidW|8B_@#F+@ zqM<&|Y-%>tJBhZPbdXL%o$o@Po4SYel0MQ;2FReHKK=k4e;`T;%4Eh5A`d1H5&1dP zrXYW+&~xGEF_tg%0&0bb7YV)C*rUVHONCwrf12>88@s~{=rd{0BFo7Na<-wqt~tn` zOZ`xCUNSx!_fO}|Cl5zH$9;z*s8y0jk|DB+tR`znT#Kqx*ISGF>S))K5pp59h-@Gm z$tW2!)b%Y!eMeD`3%vBhES52e zemW7kmry^6JQ><4v`-~ZBTpyKFx1y|CbhH3v&nPFbIJ2i@A<~o$6WybLdGr<`o+c` z^C;q%qCAUo8Onnwmoxtg@=Ee5@@hj}|25RE#c`{JzJ~f*=-1J{o?J)XK&~fmByS>b zHq`BGpmqy+D|s7vJGqg(1MS|4axeW&aIxU+UOnLVq0k7U6GY&Nk>z(0)?Hw^Mrx@uz7&V>p)gtiyBEpC?}+UnE~5UnXB6 zUqziiqU#XS5^J$aJ#3p{hTl0}3iow)Qj0j$|jYGuegg zisQOjELn@e?qmF1LLWFd|(GPbU- zn0g8HQlXc@pT^j9at4{Gdlvn2vVxp#sOz6YZ7z8z>YYdXFmgV5xS>970ktELUrGB& zG9>b?I;;D)cI;293}iXbDGFz zvV~kiF2!-njIHZgZtU?Ks~JZlevF783xB2XkAr`_@K?bIl6 z6Yzry$#SCUtW z_|?>|F&w{({%RbzhW1+6PoZ3gau2oZVXvcogQ4#4_0Vq={!PrencP6$Lf%T=Mq*55 zY&5p6?+)sBlAFl8$h!?EaPDT@OZ`67b3g3|40U}ELVt+*W}!b!?GeNu75ZcFA7^Zf z(6>V0Cj2Lu^CY?5Vwu3c)$Kk_{TUqhtT^sD>d%uekS~%iA^&A#>v~?H{wlcx@z-d- zPQF3DNxntCZK&&c$Ji5o!|^*Af6rpcM7Ve=l>sCch!SCBGxTCx5{4Khpk*{F(g4P_;909`e9U%#DoS z8UF*mny-J-{|o*hC>E4aM$1egQ=zppwyG~P4JBRp?csM2eg=G0q;orxokYAd;$5hB z6?!*n-4X9WyQhfvqShPnKD7Ii{mA}?y6pkT8z}ri%oz-Q2<@TdFw#m6Cr6MY4JYy* zWR5nzZf6X0#*$g6f1HSqr#=DtM4@L>w?Uso+fF)2C+Q;HhPqu3j`LFYk$y5j1`T!o z0n`p8C*$~oMEqc5XRm;sBlIcor=o;;TrQbM<|9@>yO1n0)a@1{UP8T;EEDl*)TWa& z$eH9UvYf0yeY1_N>z_k?F7!i%j&ZN!he4k&^uysVVC)F8l01?OkyT_hS!0;}hM}&% z7WLH$zn-}f=nI9u2!4a`8{tQp8zUEs_)&<*sW*|$WQ)jKg1n{FmkE72HQY~qo<}3^ z7~02*yp`0BL;QG)#fCOASCJ=>Cz2;ooWO%uVE7w$iJ8N zedPV*1LT9`L*!=iVe%0}bsr}sj(v==$5H>5WIS^#{B6R20{)Z2-wyvN;XiHc%xB1F z$>)&wJna|A7Y+4zeF^cGh5w4hVjsY~9pr1|>m=u8=9}=tIQ}hgIQTZ$1$@Wk>H6P= zzti}c?~(75yF}gx)IKCX!f_wd{)GIL+)eH=)a`tR{Lh8|1^h3W_Z7L9{F?lR{FeOA zP}lc8wI9eIQQuFreK#{;ofTgj{HA_{wMsul72@E?G!MTb~_PIgPl&lJ=uZG zATvo+W?v4w6WN)>xoBOTQ&*I3%vXMQlpgeZlD){@q{{01`HoT%&VI1ptJ 
zUx0Wt_4AmRtYo-F)>Xdet7LOX|?LaKJBqT~uc4|cxr z3t$)0FA{n&>=OE=WEnY)oKDUlReLj0X3;MvD@YZejWUP+T=GzI9(fo!pH#;kPJ01( z1Z~xaDt4ry;}I}qv1nGot~Pqd8WC6dwJ0y6)S;Y>Qjena2+Gm47m|y>2HGm8k#-b} z34Jl`qsTbfL^hKx*&jZ)cJ|Db*wmOdsQ7)o?F?k88;+LXa zM*ni5UqSmy@+$Ib@*475axQCi}9ZltfYn`qxmZXj$5qr2i`E$_+ir6>JwA6qG~B#27GE^a z3EyoCK=wPcrBkvv*3=wosH~3Fgs0{?=H|!4brCd_7p-duMQaM1BaPt|v1qs~uO%K^ z9G+y(W4AS>I5*s6EsI5>K$j`A%dPzj-42`AZuhvYdm)&QLYALMfZH0gerjE5{R%bbmxfy6;Re=Z%D@=Z2dS9gH*a`(0ks!5HuXyIqeRROEDcY)(u9htrJAS&-tYFR znQh0(yRnu`o`;U{xNR7=PLD(NQmcbToNl`pQ@gM<)>sva*DSZLEaA+a6>AKmy{Q#p zbzk$aKIP*~eF047{A`Q|jF`#u{4N_hSxvTpGrQGsVnM3Rb9nrIjL3q5P(!3z4gJIt zSQe_m1*xvjX%TbGjU~?Rb778|aqo2c-I&)4ECEb1ta?s2)-AU;dmmRd6!6<|!(zpA zLrwKq=hV$yIpQbOUofLS7LG=iB_^@UhH>t~jX(nqtaCoMU5`suqaSx6fbr=w4bU}L z;>U7>1`2ZG5iI4nlGat04WO;UDPhdvB`sX~3UDEgz`{)QGIx!y2I0Ohf!6)?+o~GhkZ_Tyd&VnZE)DJHlT}K z7*41&>0tGC`JGM&ZgD|D%fe7R)NEZ@n$r{sO)7~sEDAM;aOy=f>UTZ3-deh4Gg_IW zTe0K*dR%H2IP4x<09T+NMC#e&$8c8j8(kgR>s?7neJExv$5f8ip~;FuTo~?$%Y*;)&~@6}F3c}n7$%4x0~d>k8x6KP zs;itXhYQ!0TcIy%?1`wjcxo(yaoik><2H=huBvs|@I*yj=r6A=;PpCi8|LXXTR%}< z=r6T^YX`TeGT-aQO;N+CLM=78lA0Fl%CS9BdtOCs(Q-8zif35+7oe}bel;749C#pk z16Y*x4b}?}29(cj^WvO^>$6)cF)tn2ey<1jILlIOU0JN}g{~Maps6;HgFGGJ=}STx)`#ba6HS!HS|XSrYM$#Risr`R)mYBMaWv2eW6FcB@dwndrVv{}r&IN| zI)AUti5um1+uXP@!o&GbojVI>Z(V{aPerd3O^Jt^w%n@vt;>`tv(yd2*!KAXW`}{b zBY-(>77VQ0xZMs9YIk^0uW+$dD)QlW=sS*?mmjVP<8D}UBe*M(YE1Q+iCsW>C{{Tm z)Y70ggy_U@Bpy@yPByL>nBnkJ07)IZdiD_V>VcC=1hVgPs6kG{r@$D_pKN(^cB#s-&(avY`raBo7d;}>dlCH_<3A@>>N-xE({x8Y<8WvC&JTr z#NB%KS?wZ;GcG9$FN;*itW)tWg!;=4Rcj;e2cEHJBH~<~KCB07qWIN=7AqoF8FkZz zt2aG(;c#LJ$VzW7wwb2>N>#s__A@Yl&^v5y26lx7Si@cFfy=ELZiX7*sSfp0g*O28 z@_`3c_I^GlCfc)OV&+ecHEh|0XZy6opf-)5xvbQS*lAg*>#XCmQtvP|V*`uT02ih@ z3pGpMZ>`Kq-HFRc{VdT*#Zy}v>RUq9Th?HIY5E8&k{^RZZ{7la+zza`s%~uTu|8ta zb>Xb-p6va6oVDA97f3vI3$b@=sKa(PEBy$ZYJP60p#ekKydYt?V0(`zzFG_MV72Qh zG1qCwMjY>Acon0GTcP{j=Rr%>SYAbV87eUgRNnNa(4xdcqYx{*H{kN&^#Lop+MD6s 
z5oer&zQs!y3i>$^$d)diNf?wKw+l}WFM2ww{RfFbsUA}1ftO{Oi=MTFP}fkk2-NO{j;Tc{t^jMew9eEZ%AvbUXE!#H4Wq zut=cAuDWuM2QAuz_Mn=e!JywXxt|(F-@9Dul47GC<)*oybUC_6E#On8O~+8g8!&d^ zSilR=DPE`EW)9O$xV>&q(Czd(g4hKlE-ESh%?0V}!*kK$(i`aEe1ldfde3 zOv7tky>%9L!QpK62C}%`Jew0cZ@e)yM4}jl({h?Zb)n^<>_)6=s`K@prV!%=FUop2 z>P?Nyr@N-FKDCguCY(4cZM**e-IZrmh%JlSim0U`dAEyl8r7q;1{*}&j8gMBMLTwM zMS&Y{N&&BWJ5o)$f=;*JVVb1<4^8IcwqyL3nSM}j)%XOH{8-W2G&2;!-3zPw6Wp^$q{s8v;?t~R0o|QGc5L3|KB+0u*mn}8;;t3&&-Lp zL_^iMA=ns)YQjzR+*Ov8gyLH_G|E%KCk(eM;7}V3^=xwbv0dEfRPfU22v*nbcPdod zJQa`KqdyfDRbVrS2eW#f>5Xl{l*q!+;+8~DIIs|58-o|ILLUYMR=oXs!i8?a^xv;} zs%Y!XPCs!b&_daEzRZXF^`4RygB%=zxUvQi(YVH#Z;43 zEq!jRcut&1!0ikN-L0LlkJx4(xUn#)XIV*Es2)u<)noU}UdZaSmOYTwX(LwJP7mp6 zfcNvDD-_n#pw<3|tIq252j*PoG_0_l)%O{EHE9Y(lQ-C@-aYX;tu74@7QY)`j?DN9 z1aWzoYt?pV(1%slboIV!n-}M_yVR$`SlR4m^y7ZReEJMrZ^PDAc$sZ%!B@CMFXL2v zUWYmrzuRUHc=Q{&nsusCuQTWj`$IuLcDcz@Nvdt03aWIfv6er*Cel=&=;D&X<=7x^ zT~)PpRZXIOwavjbo33zsy!&(o9yV^L!xaoy%l4CMo9(NYBsK7|I`vGJiWji;zzjqEIoKQXQ6;)4?Up35sf zGrBN|OtYT-n#FR4?~L}Kcb|?6<$a{Kx}wl)cVY{LI~dFAxgfD&o>mqL z<6~`0c6I%>4cpFcZC!nf4LHmv9rae?P@iZ;>!Io(wj#Xo=7hA=X6yK62p`>0W2_** zWl>8^J+aN6y^Yrbx4kM@jTerfJLCz5!il!_5!-5Q|JQ=QeI5=%SJnjmu39(RNmNgv zt-U#SV*l@J~!yRFSA%+yCQ3z6e|T`pBXXeg))kR6oPucNi=~Me3I} z7Z!H4wN`yti=9i*5vs0AyyhgO>i=*_g?4=R#RdjXw!*^LqDAhWoj3velp`{g~U&cK3tuCVZ;iL$$~G7>LM|9g3c-@#v%tw ztKe&6&U2vTsd=xiu(dh!%S%?Dr_D=hR-c{NpY_?pX52w3|r6Aag2^oJdO{YHj0UI=Zqi}ytQ z(5Ii#?RW6gL0i{^_iTqREUt+{TXp~aHMLcHAQ%c&sYSueVEoQ)gny^@P%wZGn4)$H z|4QvHeD|mg*CuOE2>(v)u3*6BuHyHNY_I69%hl{jHNQ;s4_tR8J~v zY_@1CH@r9$Z*B}n@m+gX;uBypj@7lOARbzRcepLL;>66CZS`XSwut7$)Q2ypPdya- z;l$j*8VnPc3(qXK{HlUgu)L)KE1~}Kot1VI+s{h7 zA1g)LHg<(Oh@S#@;QP3JVK4PdkS zaYtQrVYd3y5xn53n}Kh*0sYP0+zh<7S7CQw8?Ff@E;=c+dDUL0SFiiHA^B6oq4<_H z;+KVdd~?D(^$PV%;xyGK=#S!%{(&KR7x1ek{vbe~5uQr;u~qL8hv{B-hHKn-&GNYt zj|X)tl49FEQ=}iT>Jt+>Xi&2I2h2pnYFuUws7Ch>i0M^R)f%Yr>209i1GNhOG`R-c zjrs>{#_b)jLp{UWs^1&*`?2pyENj?47@@5zNGzKFZT*3uM}6{1JkR`CHns2%b1Cb; 
zt-m(t@r0eNi;o|>1TFlJ{Zm*G!|zNjO=>*mtMQ0Cj)$TX8`A=8tMIWwJ)j5UOB&vC zu`zb}y+J&u^%su)w7*^R)c0NWh{l(s>DBn#u23|heq-jJE#NbQ{sW$@%yZGc`oNf# zxf-J&bAuiQA$3!{xJ!5$(4(ML_@}GP`~ssO^E-@!j_uSaXsiAJzMeSoO4(XJg@3wB z`_-SHi`{_!$x;0w6@FpH+q3#2h+hivyH7yQ`Nlx72Dpr*zlT{kvhMLOo z;#hTkT54VD_HJc0*qu~`n!=UM%NK_iq(v4-qO~z=WN~F(7;~^1KbXg&l}(|>#SLLw zV`EKK34SQ3jz<2oWBot0JaBRV diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_standard_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_standard_sample.mmdb new file mode 100644 index 0000000000000000000000000000000000000000..205bd77fd53e24eece4bd04460465da2a5dc4b78 GIT binary patch literal 30105 zcma)@bzoH27Vd`tL5jOGcq$3wQlVsG6Awwyl%|tpNQNXcVI~1WHFcvxl`0e}K&dNr zr0z~zN})>KUEa6%{-(2f-}~d;d(Zu?^{ut{?6c3dbJAKYmTneH-!Ci{OB&hTV(C=@ zzX$!EWIEZ4>`nF|`;rHd{mB000CFHXh#X7~A%~K~$l>G&awIv598HcP$CBg7@#F+@ zB55Tjk(0?OSPx=MTN-e^gDZ^d5?o76zHW0P{3e!< zkV~PROuLzEf!1nlT`megCj2($ErTAXogmxE4ntjjIkgq!O0pB>SJ6J+Q0Jcj{Y2^~ zrS!gkP&odk~7y2F4?j-Lb??(B1MBcsB?}L87&>x`wAi0fv z2zd|Fe#B6>?@?-xA^y0~pD=d60Q4O~-wA)0@Silcml-bQcov)k-ZK3AB+6H zxaQ$H2-j4``@!ySY+e5V>I0z<68d2HLxevR{xIPWXSosNNOBZ8njAxpMfq{G#~bS3 zp8$QL@U8GC34b!nOfl?#AN^^_n=bNZz|X*SG~)-uK7{t6BJVKhnbfn$nIdkZW=Ecb zwo}Ai)ZB=BXnRQ?={MB(cQ~~pkbk7mkAgqT_yc6W&t`r$;+XrojvO-AP-}VA^2q|^ z2Wc0Iydvnu!Y_eeD*Q5*DJLrspCjUx)T_v9#OJ2s0~+Da$F&UCf|TBEA^b%wQ$rpj z;)@Z-7*+S8@28eL4tj`o9a(Rv^BSmy$t9#}N0Z2lP+v+mlPzQ`8AZ7m?KZ;!&(Mz} zb}sD%*-myKww(3~awXYmsN1`W+VSKGD0iZWp9KG8;h)01F7j0JH1c%v4Dw9!Eb?sB zcaE{u?+?U0?RFlnjkwNF>D^Ytzkp>f6#7NbFBbk9_?HO(QkJ<4`sG5ug8G%v*V4X< zyjtX4gZQ<=zm7TALtiKK_3&>H{svAbuzHyM%r>^n0k^OD5N%`$gUZ$a|3SZ9;#D+QWDzXoLDK{d8wxjV^SBK{=ePYM5N<|N1Pvxq+@;?Fbw0`wP!{u1?<(Y`-uze>JFzHX@7 z{RYavDg3wKzsdk^_OxtIKa+(+&=)W7#3%6~-tW1)Xy>_L@?e`c|C z$1~}60I$yYt!|%#g&_Jm%?^GE_Fx@f!T(y+^9}WH$?p*VUc`TZ|D*7Kg8wsfe<6P* ze7R`KMgC3xgSJ^x`DxvZod!{rPwQ?tXbt_I(9&^@q2G(_P4+>oud#Lc zgW&fQet-A_m^+XhL=Gm0kV6f1J;SICNBI##9|?aHW2000AdJVfvDC+rjt*SK 
z^h;pZ;VPwHMwT1u{;NQIj_@ntS23@eoJ-Cl=aUNzbv+BIEkgMk+Q*3aVrs`CUMuwD zsD}&(KTp3Nc@4C~xGu!iDe_k_emwLOgnlCYlNdXhJcaBMdC6ya8e^xEXOL%-XQ95cjjj9R z9O~zi=OKQ+h_8l!f$@j1|I;plb_4S-hP_7QUqby-@-oCPr+o!^rJ?@)wbZU6uSWhg zw67(vGt~LlQ(H%_M?c>w;u}zYBV#v`H<34!w~(8VdmpaN#@F?4VeD4uTWQ}$-Y)X) zKwSRrUCg-~`aQJoP2~^m2mSt(pY{NA9)!M)_Cq56Ftta>N0IlKh(AvK335B)J5uqX z+0=JIe=?<~Jw^R#=+Dr8R>Ys9_B`S*(0-A8N#wmu?G@y`D)iUjzb^bYnD-|67Wp>v zcGG@`eAiGthoKmAX?w6wqu=y%d7s=1{R3RT(BB7pKkX06j|_FWkEwk^ev16hMEn5! z&xMcQPWzI%Uy)yv-;m!L>iWN<_C3n|AoL%p{{;Q#ls*)C+OLfLM*dFzLH=o|>;DV+ ze^dV_rFXZWZ@LLT4L(Fwrh5;vCz&qtdLiDMdLN$uWky?PIBpBgc~y$cdyC^-iKanVe#%>zPVzn#D5g zY@yGfo`LcQ(>_GR52bb(nTfnC5uXX)#+aRSh`5uQi$p)__IOAy%KMD1^kJBL-4Dk# z2iFnIIWiUReiXG?h#yURwuob%cMk|Z2Y#;b^Wf(TzrfhtgU}0wUIf2b_$ACMg^*Y1h zjCF57UKrN}j4y%RNV|!QKwCw7DcKCIg?6i<{{1NQ81*)xFQXPmJR$UU_#MJu&b$@m zO2j*jJ$#eMJ0AK8%sEl$Cs8|@JcaB+{;9N2Gt})p9r_v6&m_+xIUl;8L;qYuUG6-T zJD>V$!x5OnI(8vqH_*Na_QkZ<80wr$s9lQqWwb9RuOP1^*Ba__S0Vpu>erChiuiTZ zu1DTFp|3ag2#lHT8Q4OcS$5c3`;A3^Lj+KidMgpW26pBNrm?V-f#^@lT$UJZS&@aMswFZ=~8gL$fdYY|z4yklrDCXY4Ld9{cim-0vBo_o~cx&l`{ zt`k|N0d_c**JBC%M&UOxFG4OQo5>ckm5h=xvW;AZdgI2{?MqN^hu$Id<L&^PWNN37UF4~#=QP@5f^owb)5%Ei)Un=~|m~*+|=x^v>iM+MA?!tAI$iEu?HO#qI=+{BNUij`g z9`asM?mootr~Ux>ponin{2}TO3;hvlk0SmU?Z?R{$nA#ues&;lr|@^de^U5Q!GBu# z&!Fw-GhOyMizOX%w8!%nOHVb=UcmJkt``}9iF{dKHB?5{6lIVksp(vke}jylVk7z?sEj{`5f%QIG$yX zFKK^esQcz?=*e>5GX9;2e^2cP@<-(VMEhs*7eig&uZaI9{NI`L2l*%Bf6@M1bV4Zr23r6GjGIq7_?Z|To-wEHva&FQi;$CV#-$>+{}SO}3jZ?UUvBJi4?(}u*ge<6zK!-(u&);R*TBD4_}4M-dU74&>qYzq z>KmYM6#9+SZ-Ra^?OVuA z7kLj*e-QdMp{xFRn6XEM{wTG_5Pw|gPZ)bV=4j6y*uO`BJ8|t{{w~;0(te73ntX_W6zDd3%^4_Mln|uf5-WBopESB-fXZ=26 zU*Os+;vc}@$Flp$4@LYVY9EuIke{O5XS5F(>ihWIVwup5`j^Q02iI5hzb3ySzctk5 zzC+&k!vBFeKSKXW=s#2c1^TZ-{|)}{jQt_>KdJpi{%ts+3D4MK>~yv_J&k^ML)ETy zjMwy@!bhR>Uc&FqGJVLth#y3|pUCSEeE{`=(~=Y?R9uc>(x2 zjO7YFk6J!iV6jYGE8>OJi^yWsQzGJ})XSik)2 z+#R?U;p)IuBg!2Ef3fh7Wtm#?IK)G=>qK5XwFbn)w3mo@BXrCmJq9A=QnHzBAzKY~ zZWQHW)Z2u<40@b;f^0WbYqBo8961-^T48*xt%Tpn_$u;v5kCR(6NP^g{F9k?3fU#% 
zry_ou@K1+-2J_Ao`dQS@CeI!w&Rn)I0uMzQUsa=P>>uIke*NeOx5Z^$3qtI`pb`yCsc?Z%^q~^mY19T)!~B3-*(=pMt%Y_S3MRq5Z6(ZufK0pQru;`6Bre`LdzT ze}&qsBk`>8zZN7Ozb_mTTi?!#1GdNS{0 z#y=rHB|jq%ke{Rc7sl52h4HQV74)x#{tfkS$?wSTk@thh`;q!j(0?}eqzr1m;@{Jo z4*fT90JY!AKgd53|BLqD`NXb^7>Kh zZL6M1&%4&giDyM*s%84u|t zeWaf}++vw@FYO}@bvurtHj6wO_0OiAO$H2geh%Wf)boU%54}M6LHLEjFJhTu=p{lg zrCtWTJf%;qdaACgms-`}doJY>2#IK=#t63HeZ)R)@d21@(Yb)ZnQNNwMgS=DZ-9_zg z!#2JwiSz;*TNzxbUA~&UWZKXzwI-N8l_$#zuC0`?7C*L67B;O+6Hq`a(ruGi`F6wy7+Hq`C<2W2fOzjrrd z_fCV}UFbdF_hc+x=)It$0##Siva2fZ9OB2MK*J{2{_0%DiFFhYNiK z^^wp=(H@{bEaKy6C$$NwyxtQTvm$R2?aAa6aw<8EoKDUlGsuI^IFJO#G@h}quvI6 znX#ulMlFGL0?)8_JN*vW*V0}Ndj;*4WGA`GP`CGZ?6RFpeS z=%-UZ1Nxb?&l2&op`RoCbD47Aul!5?Yj*1TrT`8m~$oc zwY0AyuO_cCoSK}s*BM{ecRk9jW8Ql5266+rk-U+-31#-vz8Ur{xOUOs1bZ{>E#$3+ z`uDaXew*-bhkpn2?iBi6)b1wlLEgQz?<4OgA28JQKS*sG`4IUq%0D9V9)+JAXOA=I z339v0+d*xo;nd_9dlKcJV(e*={|xkJsXr(5=c&Db_=~h(67iRzze4?0@-^~x@(uD$ zLw&z*QF|NZcMJU;`0on;J@|Ws|31s@h5mui_ZfSty1x%G|GI%6G52Hg6Y^8?Geg~; z1IYiJ`WHg~68cxd|C%}9K>t?g-@*T$u^-4E$)7~t&(wZF{;xv+4gT+p{Xzao{w4DM zM&3Wh?_){n(}v-m)2MfcJrP%rRJ>15`00%IB72jlNcBOVzJ}U92!226{mB93KynZ{ zm>fb5HPrQ>U7EvD-w2_Pq&^DzXxd}QvE(>IU4A_BCZzmn^Wj@lexFG!Ga336+EdAC z*9zq^U9!6#w>ULyNn@QS8JLw>uq>FT;T^`zA(ntEq!wsixGSt6!Bx6UR z{4Cl>le5WeGC<~#xnv%hZ>Z}npcX{Eg+ecaU(8quSt{aX)XI@pA@n)$D}`SLznZyo z$$2E6oo>SdLv1fa{vzSm7<(G#Sf9nrKbEW|k0V25ouRI;o>~LShiNYn@kZ!P!jHgT zD*R^nEiBh6^eD9$;%&5-k#RC%sN2y_tpoYXX|EtxlAYu#@_6zD@q`1-$*ah#$!o}K4Rt%OL%HjzuOru!H;^01jmUk9_KoCC&~7%iuJ0E3a=vYb zzlFKCl3PXoZPacj??C>YB7PV3yP@AB^n2moC;a>2Kfv4vg}x2?L&AR;{v*PF6#io@ z_c-|kxt-iW?j(1SPa3NGo}L^lPc!xm>U}m9@ADk>=gAikf06b}BJXABuTX!Ld`-k( zr}hThlTpPsz{7 z1LWr*#!DaM_r;v+^Ck7Kz-stkgNK3NQ2*9YxBENj-&6mA{87YzqV_ZS3-W)3JqY}b z+VA8aq^j3}%PD*pZ8zwl?G>2Z@%_WomeE18fsd^UTT13BwJce9M9!u7e$B`kjj;tpe$S}EtRKM4Vt4a6~ z*h}d*lPx0NN;?Y1Xt#;@GTL!60k+fbAeWQs_gCOrNxzd^MXLDmv`;Y9|8C$(u&>2+ zGGi+56xvo5@?q zO{B`(jBAVVZ-u>8__x8no&Fs{zmxV|;N7(E5%GIz-v{0=^ap4^2yPSlL$DvF|A^2Z 
zrTrNAIPE9M?c@$}C%KD!(r`wyf1jrQ45{w_SzOQ2e_rS>z^JGZMZQh$Cf_07CEp`eyY}FEU-)}re?Wg9xnIOTr2P^3G5Cpye@go^@PN=i zw^(MVy78O;{m+5i`bc|cMP($>6pLC*!qtgLXJ;&0P#^1vw#PdIg(XFe;aD`(8m^5r zdOS{RSx#=A-C9$S7j0|_MH{S%oV>CE zYgcJrM?BURo>>x$HpHT+Gru$zjmN^MbZ!+Ybvd0*kH?QnosKNG&F{3UO8t&3yWNjU z^SpM4&1YQ?GS6Q;^+;DVx zIIe0A?%THBid$$zv*(qeYCEcP*->@SVaxKnZ7x-}-R8}*d2v6e8&$jA)|!$z9s9Oz zK)JF=vH;-Rs5}^7>GHzSHKg;dg2ZD?>{|iT0*Y6m?e?CEH#cYiSO( zhfr@`MJOIgpxXROHA4J8pBs-0&Gu#a+)fuBX1*iK>GJv1qjcMBXmi<)3tGaN4dKlE zcqm#Q#y}~|%}31~D7r$`;_^gOXhjHB=VYU5j}2Yo^{J|zcz!lKIMpNg8JFMf#po-s z7hv?2N88aL^(4#7#2~4T#beQSJxD6T?eSPk1OsVgAfOuUbYd*K9H=?dpXK!XJZN-b zrpt!WX!BtR7P>u7n=Mnfy{IA_$5gEAs8>HxktYUAWw<`FB#ed!OG9xqy`_bt(B*JB z(b=dUqtNNMyPT?ipEt|ncY0jt7mve*j;aY3wxC%t^k%|3(b_f1tLut%hE|1|BWP?< zg&KE0x5sN&J*=j`-Q&R=EVRRRX}c`w^XaCdpQnyT6EQBkiu8D^jx~3x4#?Anb>!3x zQ=QGOrgVYRo#pZQ^(aO&J$}0v;~;3a*)x3}w+FSgSre1KvUW|aQ_rR})DVwgum}X@SC7eK`ispwBO_6A0J2ngR6E3^U z=GVRD#B9Jw)8p5RrO4@VVrJxFDy#mlPA>n}*b*z2ypGm7-FLY;)~?BwsJajH+yj~1m^&H%)%X}_9=bTte^)itYbOx<8`ExqLb>VtEg*m8c$}y;^fQxdn zg+8Ab6HAQ`dzQ}&3%#f&03I~j=dpQgPK=K}V&IqN#OhSiHT-8jQ7=9WqcB#A?l2vs)AG;dnzx?Yo#N zBd0!Moji4Wa!%P)6ZM?JPZHhFCLa-j0npW`wM&hLE|@Z0xGZ`>bk@)i*cATGV|O zWG9-q>zOI)#cH}te5{qRR=s>z#_F5UBWeblMqz)z z3WP>s2HSDgQBMNr6F1hNvfT0#bi{ufRan&&Ua}+-Zc)2AtMFkenJSz<2gWF>zQA>4-#Eg)`@=#REI`8*GsJr4N`Z9 z0gRPWRgW!ORX_PVtVwB@+`Uyr3&Ys_arlbXb?D(yIT!7;yL=vp4SiqW_F(I`W3XZU zK!-WdOZf=t{c}zg4pa>-vE_+oYk6G@juzPdtL9-Jt<;;;f2vkvIuMI@CfXuck!D2I z#I<9aQ>QLWTy-i^LmCqogT|bSeAub2HMzx+@NzY2u_n*hjB1K=S~`|ymWA44^=hzG zmZK_<&+hQ6s$7^I9=(y~I`D+eP}Zj(T?|Vl4&rLC7ygkDL6t$MBDzJUV{4mPTJIN9R>g6D!m z12$;~9vGgC%kRN%R1++X)Hg*M_4%x{P@H&j)iEccnpd8WRmWNq>S$2QiyqhNaO}bW z#@xj(;0TDPqz>Nr1uUjGlB;D92Xov<&5SB*V#ea+7Eu{#z)qyj_uPD3E~mqDvMiL42zqYmry0Wy2yJcy1gEcp_VrN%Fq;=;OwbrQh z*>1NvF*VUN7xo=&LU_#R2TVk@q0PohT8VQ}LzA^qzu=%kJpQg?J%mgI_gz>TZmElP z#MNFL$W~(%#|9rxDj1`gPHez-wVl~fFLqB1m9kt69<@Hq!z&W@53Gkd^9r%0Rb_Xm z1r76Q=D$^<{`^wxpdp;hCdbtHcX-`6Nm^Zbb|1#0nxtx}Rja>R8ubobTpGfl!Tzn5 
zM|1*K|H-$ZreGi*X>1ZZIu4h$Msl#tN)rS+6&!(nRp38@MV6DmMF-uj0$DYxn z4E2;%;^i;4JTn@~%*E!ZMmlagU(I3cM;J=pEIbq()G%rMSzd?Jf#;l$tx(^0zA25n zt}MsnNxrz1#uD|h6{_~^M5r;;8Om(M;kgy}%@d>tZ*6KG;RJ~jpB|`ob%ewkg55oc z39HV|=-B@pA&bx#Ve|xch-43{*93jqaHuyyEGKBE&zI$JJM|iZ-AbLhYKqnRhWDjt zBMvHw!2E#d#Y7zrlBvgycO;xFT!onqFBV%bx)cu_%eW0kQY>$|=EjR~fC)zvW~<7X zQ-ULEW$KaR+2~r)0|ix~wl=&;@YeBiiM3pJEUGk1NWL9M9Q1Hm(B^UDzSWKx491$9 zLuz9-<5VA^OVvxCdO7>gtVfL)qUup$vA4T$48d&jy0Wm9Vbdtfv!f5t1;Kv{CjwTLM>BYXP=C>a&Pkx+zaqMz>ajH=(NFsr0qNgSHY5m?& zRJCJ6%vurZXu-`+IUeI%9hlUffVU*9l5UqddtvRw_My(Pg}Bw^TT({v%W$hz71pjf z$wO>aC|0YhGu;?z54GaC>k))QvCV1DDQ+|lJEeL`Y6Q7G>dg)(XZ1>hExab9_xGq6 zKWAP;3aY}P_V#~QG`y1geJ*w2!9d5M67SFItc}rx>5W=(*7o6r##%FdrW!YRQ=UFb zHHR;U$wM#ZEM9q3E$S5pZw>kxm{&vfqEPD2w$@B3#g2%r)f{&VI`AG+hZ|y7dF(zM zRb2U*4h#mpa^rp2;qW=t>m+tc7hfk+9hK3051Nu+6mDn;HDLgoWzdHAAa(pkH8@T5Ru#e* z25Sz!e&7J4R`DwA5a#JEz^AkDDr-a7S`doY@7Nl$R$ZpL5nNu0T z*PodBP2Di{RU>&**xB%kgAZOf)Y$MouWt%J<@BLf%L;L<^zx?i0-+}TTmqOArO7F) zMf^s2Wuz_~iR%R+9##+N-(OH0h*}VETK956Q2Q#DS3S=E9}9x%sX}!m!BIpV-A!dq zj7|MLO1}V|n2GmUw@V#;$_hQGi>=Mi35Vi4)}!llaBlo}h55HiG`Rq;LKUF| znrdFcaU?^}qN(^$;>F7=YRAH0^JHQCny-v_02oSn)!~(@&jZy9un?|Mqa|B?OptHe zc+c~yb8b+*ZR_0*%p)V{}bM|1Z&^0PaA=0=P=I$?RRiu( zZ$|h~;&!3;YNj1-O-#e2*5!i>W6iOq)bfj$M4uBAR&U_yYb@5ge6##w?R7Y8ICbLl zT}`kUA72~Q7dfoJF0R0ZK@7jhuGLG$aX_6XOl=;w8{ccpVvggW8g^LBeQGA5RsT5- z6qGk(ivGJN;4>HAbktqp!?yaKgbe|o{dDWff;b6e;?o=+SjNC~GzU%QhEN^af6Cg( zlGFm~#W5L;!3f2NQ#>D>Bh@D=)h@MwX7oK0<*`d9mreEcu`PsMd1Rm%9|zHyeth=P zuYcG^u^g*+w?aD>3LiQd`(B<6KY?vDuPPktQ0Eb~JDD$TTEQ5~55&W*VevwTThkvO zaB9G4#9%DIP{JFv+M*mj{Gz(C{irL5vyFaexy7+MtXIizcKYiazDc2P{P?1bRTqaZ zySk^K*JIDb7a{a-ez0@b>PB_wzYEq(8vTcpV_%vOHE*{OO=VpXY;E?3yCAs$Mm- z1L4S0J^BMV`a?F3*d8y&oBG7>*PmSqaW=-Q0*<$8lThmhZX#oF7CIt;H@H$=YZX3e zH#UVLYKm9v?27K((iUn@9z-1;8~&!Dh7ERopBHc6_&AR5Ha0GaMhR3WA|{ zr}>LSurhfFt%@v1ha^(31$fKSA6Wf(1;h@ls>1gZKmLoVJoxyB(_&3gHBQtm*0NAT z2bx)puQ2+>Ah)7J?cjXDaN|3*S}pK`f%j`1jZl>n@7iv4-pt4I)4O;^&o5BLzwZ_W 
zfu>m7&Mv$_|NENa!`Eiqn-eE%mwtmrgRn2D>50Pu8i{)=!^hdCPPL(i8gNWkHR3pD z-d3t&|I=M=d_mQ_8s2_JCm7cc&bi(wZVA;vf2Rjd(VH5f%)a3mDn$nyDYv7;Ez7)YYV=z;uMZg`&gm< zc*}P<^@#J}%a9rpLj%}d%d@f6XY`wnfe^%Yy&QiA!e2>}JDkm}Rz~ch*f+iSJAkSQ z>po5+c<03)ss@)i7-aN24R@zD(2RZ;smk*3af5%0;=8A7CI0z;`UEBTYFBAoCoRD2=D_7swZFp2zfpYV(ykUK=s*knv9i}P= zryusXWh)=_6dB_Wf(JN~F=zVa@3*VH6R$@(P$f}RE9;m6q zF)Q9#i*Jdoq4wI=(9&4ER((^!VzoZ)f1~&#%fHce#dSCqHq?gkX{f13TH}zCS)~m) z+|`8=;oA1jw(#QgNLwVjBxa4Y)wbaCQ9G)vt&cRsYZIZ?ww7>KYimPYG3H1;HVzfs zhd0yzuR=wrG1%Zv*z}9_CEXk{#R9IW{t^Yax<9{6*ZYm?xcswlyDuiC$$%ufb=GP$b@7f zGBKHiOiCsrlandPlw>M0HJOG?OQs{!lNrd2WF|5*nT5;gtU@2(oXu3{$v0dNCuI?WC)3SsP1DivN&0S{EaM0hLWYo z(qtL3ELn~$PgWo+l9kBHWEHY1S&ght)*x$=waD6J9kMQ2kE~BNARCflWFs=1Y)m#G zo084Q=46D)l;AHs?=A2Od#stkszLJQ){eEoN0r)E!F_6D0=NQ)jeoW zuLIc;xoFlgBG(CiXQ5-6=>k7a_+9CDgWp~FJ?Qs@--~r`(eFdAFZ%t2-ye1S{Thhg z6s$qa4i>rJ;SZrdlpIF>K@KNJXsYucNpBQ6n*5U-gYDv)8(EbfOO7MQqd!6PC(@q; zf3m|*G@brbavB-$&p`ffN8a=g^h}{=F+ZD}L(V1VAwM5$CG`T-3sE1TUPLY?m!Q8? z_f5+*)%h)lzrvx7dDAM+tkzWC8u)94UZ-bF>&Xq|MoqQt&8Tr5O0)J!~-Ix(39b4jrRu##y{ zZ9h5tDae%QrxN|t&}rC9i#iYMbg0v_&LHv`;b#&$Gc#G>XBB=n`q@ceG6$KH%!Rq! 
zx=w7oYq(D8xbu?v(8n|K!!H0`ki9}=VbM3EZxOl(GgkOE;oG78g!b2UVm?c5ft(8> zgOLvr`S{~32EVxQOECW%{F1^Cg)Sv@Y3MS{mPK8Sbve}KO{OHcPHq)6)%B>zekHOp z`c;^#>d2|N>de&8R9;QwYtgUm@YS~Kq8GznJ+eO8faE*j7DnBO3@00tO|Y$|tea`7 zeKdz3LB9prQuJG)A1QQeX4=4S%Q{N*+tF)Jc0jJ9=ttw4)M2j^dLytpQ^$&27kY8% zcV*p;>`wL|dy>7#-kR$C`e44V(EXU{4}XC02SN{GZ!q~gIYi{}e!C5$|A+90)JJ?!lj{yzBog+9Q{LHLJ+ ze;E1*dq*99(v0wr;deEjRk!1yKQkwwPeNw~Phow>I?di0)Q?!7MSYI-dGdm$I-iU5 zE+KcB^%e3ed5yfTspf9byNS75!oN-b4*a{qzXyF^XybZ6bm*kjp&xVhiNkk$3jZ1X z=fZzM?KB-wFMmnGf(k3jY)IXZF63Uq$~v^uKAHw2!8{iF9-1++iBm z$K8YUMC~Q|381}&_F*m|{6xY}44p*iq|nKjO)mTt@Ka)?XD=1%)Fx9>zBle^=%K?#;AQ^;QFjft$5bde+GVWb5W{L~H1pMEG zF3C(N{8Fq-lVwD%EWL7MdE_gwu1HqWRQsu#do9oI45?}^L=tX`tun=|q6 zdSCSW(d#dA1LzGT2a$u3|DE*^O?5wp!XHNe58)4|Hv;{U!XE`aTIfHa$1wYs@W;X* zC-it-CtC=ABGy69OhP>wYbDkcdQ-`13h!_-H}qvXHjF-^6dAap@y3Xz4;H;cZ7ei8Uq);7_%>pD5#0}p@X z0bPsut3%c$>mgrX(6?C$PJ`72>rpVe`U!$7kax{?;-b!{yy~g3w?l@gYXZrJ}ml2=p9A>U*R8vJ}&eL=#xU9;@oNS40)C` zuIqVSr;I=Li(0GmxP<;?X0HhUD!psyUuS)TyeV?G(7#Rpj_~iozeoQ*`9Sm^qW?(f z$IwrhdrCeN{paYvaOjjBpkLvA_zipw^4@s7;p|)Z?;LrL_w+x&|H%52=zoU)h5lFJ z{|En@(BF0KX(HW7chZAIz_`AiURViqt+Y3NA2K1HEAd`=CPqF9Rvz|~qE05}lhaQD zKPBr_WNI=EnU+kasrH{9a~Xur$V?{qnT4N)epWIY`q{B^Qu~rQG*jX^^vp#sx5HEW z%u7F?rt_*^bBM#NK<*i@I&b1 zKB;3WCi=zEFX7OtUlLvetWf4lX{ugn_+^AH3tdj=@|>vvzoPIf(XUKaA*+(r$m*D@ z!MY|{OH=K?Hu`nw*A;#}`1Q3;g=?d-4aqRFk*4Z}(`$^mCc=(fy7k?qiL&l>NxI+l)Pw5H*u!uCBoVco*&?C?Efp}Sy>;7lCZ6<#-y z>rTH1*%SRL3B72ne zDAc2|W>f!(dW^2s_WzmS1@KBh-&^=T@DmE1h?&G>67-Xbelq&W;iq7ol1xRWCevuD`-5lD zD;-t?tn^rTzSMDLATyGg$jr!O(Y4BDh0exacH#TN&mnY9W^%#L%{mX6SLE{1%a2?E z)&bYMaR0>G`4W&pLn%B!kFcGDK7D1NTa?7+IVwf$jY! 
zawVZd*()Xd(k4?HoV&3NwVkqLIpoW;t|0mq;a3v6GISN@s*=^n>LOPIxtjEA3BNYI zI_TFGem&^=TBl*&t0BB-tT2&l1Rc(8G$xyfepB@EuKzf{=Fkz$wIExHek=4Ng>DVq zM(DPjiGtrw`0b%P2;C9ab&$|8oa=;KXV$S~7c!3QN_Hc=lRe0un(BV_qSqVS?ZdjS z==X!)U+4kM41_O_Z!nH`oyZMgZYXlYg#QQhaG^&qHJirg4_f1y8?^*GTV z4}XHt6PcL=f3onW(4R_9Lw~yH&(L+6+wlLv|IQQdu-8nivskmRwqVWX>>P5gn4gFK ze4!UWFBEzaXBLx7&|k`WnaC}Nze4Dh(5skRE&Mg`*9y&h;I$t92Ie=Co5;-a#vYj(^SWQo!$-fZ?e86`o?o| zN9eoE+#~O!|3LH~LO){fG5G}E3)W9jKQo!q#=n=(wQp#zm(08(Upsp0Sl?=`>UYe% zCqH2RBkND(XYz}tn)^!cKjgo${_f}}FzGsh8|hAZ;F^1~_R=)AmjDBb-lPwi5WPeq zmsrS(U7Ye0A0}G}ZQM(yK+*My?L)x*}H(etr54$cAK?$Th-w#P7Q? zX1ZZDVXmpjHKW&@j6kl1=(nWb3VtN()?^!zYm0sq{dU4{Pp<>n5xHpAF=QuAbsajR zA1ibh=s2Of;=BfMt~=R->?v}+FxOk?KFsum-%t4cp}9S^|AEkhm>n!~ztbB+4n=O5 z=>I`~IQ$W;M~eO^_@jmXlbJD^>DHh>)@1Vj4;+W}4Qo8sVdf^Fo`|)QdJ^i%tfy$I z=oe^}4dRCBZ7&lY+P^jx9mab`aJ1;Sqly@21UIwzJ+L`a9w85_&iE9_IE6 ze;>X5*P(B)cT@Pc=-o#Dj_~j5I(;qp_l1x9r_Spk`G|as+!NMMHC65z zz31c$@+J8SbFW#y(NuG9>AfT0Blm&zN0IwP?=$*eSbrt)ZW-G$R(iaz3BKcftE08I z3AGz*chtyudpPpmp3q)cxtU3T8bijmynRG2A#@_46GJCqE-9IeOfGUM=%qwH6;@{I z)TqB$V5>R2)&m&u{k9G^+=tlVZcGCS!@<{)!ws@YtQZF%QmKQEb& z%&)2TUw~dgvJhFAG~@Uztcz%>IV*gd&~|40;QO-J7%kHuUU;m1LD6}lUAccFVg_vCCZ zvNzdB3U$ z4C}whvE(>SwZHMmO`t!KoJ39*xheFfB0r7wbaDpyx2D?eKlEmjv&h*vpWPxim;O9# zcRuR{qQ8*dB62ag1o@?`myydg)&5tYzf$N`(5r=B!0cwSi~bGtZ_>Xd{M+#F2z{5C zd+_f&{EQ9hKZO5?^<(l0`ILM{KG#&|`-0v}%)es&n#8>|GAj3$d`G?~KVa@7>rdoo z@{6XL|BCC5_fpm0$nPeT58eYG6INy{H%H#b9oj=^PiQZ#eG&-Yn;tgelaO^H(N9b- z3HnJ{CnJ-KTnhA4(oZG))b!GjX~}eCdNKoECPL@FKH<2qzKa?zmereG!L%%Hia>6f9zXDki{YtDWlT|d; z@mHl+4Y}&N&J=(BYl>Vg=4z95$hw%TCvx?n8?e`q3={oE=!es9Ec_<$n+n~Gnda~# zSho=UmhfBAj}(4ulPMF9!8kW{er?GpvK{8yv+jU;AXZ1}XiYU2L$4G1orNC@-G#k4 zva9HKqt~76fqYNV?*-kPy*^}LvLD%BQys?uoX<$12Vs7&(7!_u;oMN+52N=7Ih-7U zyxiU>`lI3hi8YCO4C=qI?>*FGp~q?MGhX-;*q^9rnf3f{ztdEl?$dj7tdYz(oS~GJI%$=oo4)@C&`aJao)UU8EYOUrk zL0@M7itw+(zb5o`W^Ta0$@-S)-==qmyi49A?~@NO{}AgL^&|2z`2@YECR66tS{w9v z&fW|1rK4w@gF27b$i30E(r=;PvG<<*K+6053AxYozi6uYukim9`WrLfaZOFSPUuFu 
zlOCGJd_qqwFZu~IGxPmQ=p%C2Si(eDzRV^@okaAL(oY6IIqMXnpORiG^ivBz4Rl)e z(g{C3y$tAQWSvR$GtqTji{z4ShRnKauk{nQ#g^8;Hy_tRSp`^ny``VAZ6KKc8aM z#nCIFYqh`M=$9lz(Jv+XrRkR;%c5V7b$OAi0KX#rO2V&9uL}BAgj_;Sx`EIQIU7bcLO)#e8$&k{x+!zb$mZxrV6~xcfx0D*EgEd4 zsg5y{eZ0qpR{gf3j-uBNbL}0zn(e5ysYZCU?lN-=G!+ImR3EpPb zTga{CHgY?;gWO5((p2|tH@!XB-d@)GM1Md01N09H{}BAc^pB87$$!aXnrc7Ckv}2y zN$69|oz~1+hWf0?okQ+CecWTUt&8L(@-mo`^%dl=vc87;CF|>`Z|GWW<0k!E@Ncue zL*6Ctk@q##+yi!SzM=OP{dcV2lOM>B zNMkM`>qKN?lFx9Wq}0hw zrmVOJiIS5kNMjqRm`P2h0rO#{rA|ktCo_=7Y)00Zz|6wWf;y|v*;r=>eOc!qbCSmP zaTWim+z|ZLIC2AL&m9kb$JJ-yqh(V2G}> z_0+^|75{orp>0f;xTxqhp%tPdXzKI(6S!{upK)c267i1Q-Z{bj1 zzfj+zp}u88eJlD_Wjx%^Vh^zDWe>rCR(6XmIK+7*6ceGoiR$@V{QQkw_y?FBrE^qm z9oaAh0&zAtE5BeP9WQ>`bbP*nzr|mlx6O+9kJ`o7jZ+N^z)@N)Hh+uV-)eSu5=*;)S2p&*OhxG~lsJ@iAm7%~nm+!kQm09>aK+*EVC3bI;)Y|bOR ztl@U!(&MuGVIbJ-q?`xhFIJcxClg|~gy0HbKnU0Qa6cO^Rfr|X*on>Tr2fZ5puZ&m zhi1pag!v!J^+Xes3I*dr;dWR9^vnwMpv7+UGmF}FIt+;*JlD8Rfx$-n zhj5Wsw*t2zAQ&6Rc?V$!4i)m#bHZDJlfa$EnIew6?r(OME_S0H^&pFJzl~?sZuS?# zbv_&sTL7+Tu<^hf0}k~+CW37if8#d#s|l)rpB||nFL#)+1DtzMkR>R4C$A!BdWHUP{7bDn3xN-ABY?csQRhz%rNx7bAVp8`S9}ULM4hk_lOV>l; zL3pfi3G6rq9CX8oZ?+4E# z9$$3{j7`K#yVZH>$764Nmg0PDHXK`Ukl9%}&;0lhQlG!L40cO!fST~vBG7d{9FMH} zh{L_K`8ah59cjp zuu(7OHjCPM821hl{X*btHqaPwDAyCl0pU{$U$Y@NTSx6W-NY1PyaV{~z~{U1 zVc;xX4>br0wBXsm7Zg6(MD03_Ut@TE?8eVA{8+$~;VfMY=_k;Jk2E_TKY#UMjVXup z_jfU6JW%eqNXC1lz71`5KTJ7k*JI)8w>Vz#kPu67FlO-R#;adV;A1Bk-<-G~_ziE9 z%Jl@Uf(@I+vg42f12H3|>!EO)9Y5KP-#*wCe#T1WdIGz_FGS;~qVZ!qz=jzqe>r5l zP{vP7d>!KVK7Pdu`O6_ZWi~sKcyt1Bsh#96hTLt&k4!7RJB-^JfL%H3UyPZI)3M_> zr*ZM{S~^Kre`Kh4%@&A*!;QrEFm@C#@EAGoCH{NST}AK$*o`koKU{BP#G#$1;y(cJ z%;D42_?2UPOBmnvKWP_Z`snaMi8I3S*|94jTnwlWOPle-1N$-l$gmnCQo0@rSARDd zFA#oKS3S&4X8c0M&q+Ml#$JAiUwp*ku<)|sGYZ!? 
z*cfoApFYFmuW|#M5y#do#(jye{eSiI%iR`&hZ&ES`Y~iYX3qK-r(w5Z`}iEhhj^e_ z3fK82?&_ASZ!J7A_-Xi){>7NdZpD$|r^;fxZV#cjew+GkJI7BDyVrS~(*ZB5UpY+C08ovPZPuj(p z{;XGjwBd2aDcbEejNugHrRzW5!c+#|2|)oC{8elWP`KEMdVP#n2fwn^U#!M|?jq$+ zzvbeeI$Q_ieKWpKj6X{7C&o|u7yH5(&R>1N8{e$Be9qd%n7R&n4j;aD{GE)aP|s0g z8_sk5+zMA4$5%N15W*i?PRe=U$7z}D#jUKkDxj<67Xi@K4&s zm^!I+_CTwxXo%e!QM9PpQTw%S-MT22%YQg;%Q#WBsm%5WYe>;RYfF1kf2-AMZ10B= z^ovcYef*E@MPPgW)}k$Jt%LRU97g2->+z+tN7$@IZ4oVl)TZL=|DRJeHsx0|u!Yr6 zZ;JK*%chnddr>W-N42QvR+g4A9Uq0ah>DJg?b)n#Ol-%9xMm$A+Qq~+>lPW?B`PL* zw)@ZBsAxxbR+ScAqdK%|7TGDLWm`}8HYpmEtkxg72P_< zw^M9Xw}_TKo3)CJi)7Ai-_w2ExjGNx;E+@3^5tHegOjEQa4tV>j{$l1RC2R1F; AjsO4v literal 26352 zcmb811$0zbw1y`N!QCaeX2?t&DZ#C{6DWibAV>lsXesXQ?(Xg_6fG2YZK1fkw52V) z|D5wDv!`$9darApe*6FT-shfs@40tovbt)qSlld@6af~C#hvu9SbTnm_N4YAy~$)` za?*!PL8c^Ak*UcvWLh#EnV!r*W+XF_naM09`q8(o~Y|3^1bP+b@yf5PvpOa-=F?>oDG0crcJe~14#N!c9qMkrbgs(gd%Vd$C z!u(WnnxU?nA>x_nncN7;}wpnJ4n|;V%$+p{ZB*MBkUi$S+~Mlw4-0^ULY2 zK;25=uYz7J^cv{3La&2f&)yBf-$-v0xtZL8`mKz&k=w}~hI&0aQMZfwAoF|3?~%KJ z@dNcsOQC8mP7Op3;ziGpXeVI{xNz#lgCkag7HaFcMAS#q0c~{W!*X9 zpGW+b{zdWh)j8yf=jYg*Cszzlre_;}*T&MBQ!pcj(_G?}_|<=x6MCj=C4Ze+m7Hx!2?ylIO3_=N-NGsQD6*lzfjs#U|y9m7LtPiQZpy`hr{ot%9>@KXpsC3Gt0QVTx~ zy|lPaJ%yhh^%Y!8oU=%S8|O#4Qiwydv*QFCX&x85a=w zg7gX@Ul^k}brG^CSr0XTq>Z$biT5KA^931C_Xk6VuqKoY zBTJKI40U~3)RzS?{bLfVB==K<+CPshO) zoebgY$)=L=rtyrpuVZdH>2MiehcA8&~FL9mGHlTZp~a9GE(H* z(u+b}I}@kE-d5+V&$~U@0r`%?kAdzabZ6);tnEt1l5wK08|u0X9nYE`WCHT?Iq5~e zH`#~mi@JV{zcoz7ec<*Taxvc_~KMnqL#xorGRJf*YvoJO@H=CRTZ!Y6`j(WHG@E6cu zDEvk67Yn_FHA~55$m5w#y8kPnR|>s~b*tg8u~<@JZnw4MIzxRv>shyf+=#kOj=b*M z0`COIR@Q7Iw;Sqtb|Ak~=v}PYP3|GTCx1ZwUdH^*YYdKL;QENqrZfFA9AL`m)ehps%v` zn((i~zajK5(7$5bW$jJGw=9;_p5SlfZ9~1DJEl(O?}_+6YagKRq3|Dx-Y59|5RLp( z9_tx&AmiuoUqE|Nzl46p__d)v{u}sj>Aw^Hd-xxO{vG-cq5ovhNBDmU|8MAjg#HBm z*`eJnSd*L3Fx)+icE>eRYf2p~^4`oPBa@Rp=u3f-3nL}HRAg#0jiH*;JuOB$`sv9G zWJWR*nVHN&W+k&3>iM!`z8pg5G;!+otj$g4A@ic%*TlL%A9Q}^3XlcKLZYrP>WW}k znJbF87_QL_>f&SxvLxwesOsEHiP#_7#u__`>!YuG;+#RKsf`hAe7)Wf=un}cI 
zxq70$KD`FWHxzy&`i6l&`a63j9gBxAXk#B(6^fL8bh`IG+4X)I{NF$4UW9~MtYl2w^{gG=x-&rk=s$X zgYiy7eVul}-!1eW)_f2D2gZ9vejnDx=h1yXYRA7h+2 z-*M^_AV{JXBLY`9q>8*7vxL$zVKgx z$?3g@e#7{!p+2v7@ZSslfi=Iw|3mnHLVpzcFV_7{{zKyWx_>sYhlO-ARLAph$3Q~I zo}?G)O(ru;+s;te`Y@LQ{V5rz68Y5h(vWFUmyU6IGJ~O>FC+4qgw70|Md+;1+1Qs| z_&Mn1L_U}BbJNd5<~2+^lR6*j^D{1hIE-;YvJhFAEJ7AVZ865h4fXm$y7vI1F=tc2rL zW?aQkucs<}>}?fmU7f5!)i9H&UjgW6F z@=fSBC7Y4W$rfY;`dTt>MSf$b*WDWVHuNLOwqz73_jfev+Z$i^cYy9Fbd1H44)mlJjV0M1)^>ty+x>7?C{fJJswLjb}_#U z@p8s19CaQm;ja>UHEY(uU(0wMxt`o$sLy93>Ne5eOl}eRt;lbqzg_q{;O{g#-G1Tk zX6+vGd-4ZzFS(EWk>ng62dEE{hseV?-Vw$>8S3>PrFRVZpM`(i#OYo!f08^!o+i(b zXUTJz=RD&JhWa=c;a?K^GV~SJT^0T{_}A&*5dJUle--*B^exu?M&2gxIO@_DLft)l zj%Nk$V|-%G1M(sH2)W0OK948RPlbL4{aol5&@b7Wc#U3*{2S)pqV65z_ags+-tXie zsQXjoKSKY-+~30g$HeJdF!$Ne(}Ht!W9&|P7^>^LCH0YsB0tgki~9bIzas}2>h%qzHwblug+GM;Q24_b z4;T3n@J9+g3VJl_#t45bJv^(P;~7tI!u2S8olYr&tN>0oF(dJ z)0>03xs2zL^F`eP^gZbNLO)>NL->z`{}>wAQaxMx`Fct| zga2IkFX+D{Uy-j3GuEMgOTI(@dlTzhjM#f# zSyK!C*TSz2&Ashamo@ds`l7A@ zy@sf7B>cuE&V+mE)f5AJ+N&7`o?ox#7$Y%Sur~s6JmZ#REAkuUS~G4#MjGn#X^VW6 z(Ct_g4Zl6(4rE6%hU{dh=jn|4E<$%@O)MElc0*lvM_ndd8?PSBC6GPIUXD7i-l*$C zzc1NO@jk(d}7;-Ff<1iLc zk4HR#@kB#?ohHGbEc6uUsX|X<&vbGI@-sz#7X8`q=Lml;^gQO~3x5H+Mco$Uw>osDr_kG3$Mt&c zq~3+xNyfX$J>>V~59D5QA9{W?vA+KM=^uc9knth%FnPpK*Z+jNqe35p{#oeb>^WhW z*%$d!B7d6sGvrzF9O}-Cx(m=3nY$$X%kZxVeU&xW;9qBaL*##f|Eth9p>G+Txq1NaXaKN9)J@Sg~c`=+ngGx9n40(CDLzcSQyuj#!({w>A_>UZRO z!^|U*`<>n&!uu2cNBVyW|8I*$_2^@K!hh#njqzv17L05dZpQO=Cp`>RkGCg=7kxA+ zPy5N>Cl}fWI)%_Fp;NIoweZu>ON)Fu;ispc0e(itnaIqdE(^V^I4|sFy_W3g@n@Wa z%t_`#E;r*mhPo#&J?u~Ke8SI9zW`Yf`9dOJn0^uXMTK9CesQt{SrT=AjICrT!_2tP zdM!56P9|RCK%;d{5atPHK1BGT@Wber7JeD{W$Bj_{#WqJ(+?*rkQK>FhWfab=~W@C z8fL+HdRG^9HK1!^L}Jtu{@3iQP1ZrauE^J;Umrf7NAHFr-w1wV)-)lTlFdY2bJVq< zA3?Sh`BwD4AzP!qjft}$R_ChEw=Efkd^^U`BHtc<2cbJc$FQyw*_rG@b~V)V$D%%t zemCKFrx#E5AQQ-*WH0phHnEy7%S7nD7E5wmXYYO%OS134Z!zv;^v5`kp}d*wA3zR- 
zH^?!M_h9-%$f3v&V?3N3VW`hzB=V!^k0!^6{8;422|b=Q6X0XbY7RZmBStqYVeK4pF1&f7Za(w^<`$BRM1C>7C8%2}{AJL~gxy{qIk)L&awCQo6y;zbFeNanM>qzBcDg;ysYsh z^O5<<0;n&@xR9Yf-@@>V&@U?dV)TlWd=|3dnkMrTbynz7tnnvpB5$Xcczpv=A4D%$ z)P>LsCBw+ls4v5~tf5|aIeK3qU!HL|;;M`*Ag*Y!WK+khWT^WpGhf9}<#kOp)KzD$ z23eD=<*3zj)TUp@Pc=`Wt4OkNg0PyYvSl9)vN6dN4Tz-cZKF4E6ek!yiF^ zr0_@48;$%J#$(BG|PZxRy^h}{=v2V6vw)4o(#W+rX z9;wcM0me4!g@_lKSg&U>{Uz|1GG0b5Cs!Ei`jzxnk*moy=wB=9)P6|+-a!Su?zX#Lhph8o^?M6e=og#$p0w({m=)5KFGR5@DDRSLjFV^ zC65{Eb^L6xWDgPg1o}_1?i6{NJR|DPqV61h+&ewj1(ClkHgr6QdgU}hFGqE-^nT5WVWiVW`(p z622dXKZey}$)T=aDMOW4T6;E!mmI$C4J3oeU@H0YWE(OPeQg;>k?joiywUX9qpkzvj)*%k zjxo#;hx>@3d+-iX)NxnT#tNNS(~bG=$j6I(5BdpYPvm=vd~f=F;P++RPvpO)*Pr~3 z96$~v2cds3<00fwL%r@{^oHZS7Be1+|38S@f1@zI#~6*V1Y->IV@3Zs_~V71z?zBh zCkcNt{VDLL3V#~(bmnG|Gev$Dz1id()Xx?9dC>EPUI4vN=tb&XqM+sJs6p}x+W>1{!NE8}eO3HCRX$1 zOb-16b9)iL!Ptj!gWiwie)0fv2N@qC50gg>^*Vn--BF>BvF2y;IPxbLpA>bc;Gf30 z#M~M3EWC4AZ#eaN=nFz$G``BKk^C~o6^EA`_bmA}_Fi}Rdab|E|J6`?H|gCXe?$Lm z#&^iOa;>ey=d zU?Rnw*zZ0m>7^o5lWEAbhPp2uz4T-T^k-z8Nz`SApGD}b(Ak8}&Ym1(PULfmd~W)A z$h=r9@3&7rQJ0_j0_ZO&{6h2#!!N?PC|QgwZm7?z1nNo(?FVfYx)gi-;oF37hfchv zfvgF#SaRWf^}NA`O8bOj;Cb{3!&r$?8lw+J8H@(3FH4pqze29OiS>MVrhFdJNnUNd$I#^F&G_jUxpe_F)`Nb>de|M zsOxHC?Z?uOgWrvDcQT&rVVG+#Y7#K7Vo$Oc;@*zDp0h8!N$l;1_*)a}Tz}~AgdV`U zf$#?je=z+a@Oe%?!{`ksN01{8_57pgjYi)X;g6+1jvP-;K;1+~U9K1OCzDgispK@& zPiH)XoN1_!KMVQULeF8%Tyh@r^F@9E{e|Qr&Xq|Mnk>+O{m*Ue+#*l+(vFEcaS^LvybsE#Jeq)+}Qg*dkpow-$VaE zANxn;m9Oi56!Cu49pGFCg?|YCVWE#W=FsyWMgAE3en$Ko760Z zlIO_t45c57_{8;3l(0hvfGse$J?sYxSOXOeCe{HD!H}K!me@DI-`41+}t?ut1%>M~y zVf>N&3;Dkp|3iKvKO3g7kZzhLy#>hjRSNOh&^9h}wF`n-f z1sN9-`NE8gkVU~_jEj>c$daTVX(iQqN-_2)ZJ?bo@_EYP<~aTHuv*8s9tknA9m1={ zMkn-+i|^dnW(^9k28CGz0{k3dm2UBIefox1iI46Q**CmekLdVbF;UUsl@j9O!z=ab z7F#(XzGw79UpL?Cz9}MXR+}xvY6}js+JZtciB=*0R=dC5w`jO;sc_%maNqLbzLmq- z(LBIv4@Lu;0_=WLIM@4DM?<)8iiUw!yEP=#8fy1TidR%0s=NMtf zbqK`iDcJp-l=C5yIs$^N0l4O&I7XnqpOgA>hdn?a!)DhV4)ukOKs#0xVhzK^w1xON zDHk2;O0^8cnsDxRtj!+gC)JlOmafKj3BdIY^%E-eUmHlDy54hvA$ncv+=B!B911td 
zd0o6A%>%=%xOZXxsK+6Ma4t_ef@-h_hFSyE9aIfT!nq-FPq$PhK{)kb8?Kgapn~(i zTht!G)(Z`^+Uz!*ioc(ea&Abv<3V`1)T}rQe{@Lor4F@K1FfOBA2^L5sa$uovfu>l zcC0H{U8_JpC;5fV%O*9u+P-0?DM|j!Q3_+LI~IO5hw}4z0nV$Zb(vJxGfQ(>Q>^Z!n*K3UjXc>b{cZGkpFDV)m_H*s^6s2dm-XblKf^-01-y;@Q$3yN@m z@p$1<`Qz;?q|NoB(4*>c7TszKfWa3eHR?!=cK;S zf!Cr{-;Pjg7`}=~<+`Jl#paKP#6L)VG~ii5kCZN&%$BsN4=X(6>TwSVLYG5^erZeE z{PCp#FGaP#@TfRT7e`X>2vrn>OAzR94bdHm!gYOw%^zFDA7_uVQ4K=5?!X1Wi_wN{ zk2`7$4fAuBuA3rs5k4C6e!%NdsxQ8V@sd-U(Hf?%R4{s^bbT1LHdNuUSD%w%I3FkJ zx(RDj4;CI_>|)g-l;i=RUqI zaV$G7HwuGsxz!$ZlFmJeugq5NHnlo+Cg`-|%ih2A7us+vw5e|}>bod*sgrcw)ZB*c zsecYw?P_%n;krITmDufJR&}HB_(wZYYa~E@55zTa)-Dc&_Z99f%IxYZy!u?gOU_BUZfdUXsCsJ$=?7K_*Y!9EzRKG| z@D&jINWJEqdE5n-fu2VNu2-o!yHhjlYw*#9j1UFME7ae?! z;3cYWh7vPTDD~S2uOIcJSWS{7U2lh0?&?$0riOYs z;5X&J^cRj5q06w>@Z%*|U3W}Cm2~y0RgLVC(4<}+{gs`rDx)f|lP3jB0- zK9;!bA?oL=O?_{`*Ax61LP+PH#I2-G3*YzgT?>~SPolaOPTEDAx`fTtpLp;}z}pBr z(8RdOfl24&_%oLJxnRfV7j~=K={9U~sazZ&u|r)byo>{I+0^G>qH@up?`lqauzx`5 zApfX<(oz2Y{-sO%IgIxBr}UTpEMj_(l@7rWT~-`SX0M|M%IwmoCI zv}+yREiS5~r+WwYV`*!*i%f`Y8`&eebwb~6(XEojbc=~?ALrXGKBiY>RNvO^q7$N{ z5-@S=9+6$Ub&0li?b@zwt#;8pqT*w^DgSoG=-B^TSIwAiNvrg((IqmrL(j+#(LLNe kF+mD)vq8I!A4^hu&qyPW_ From b2a2a5308d9eff1f8f2bacecdecde16ba811c224 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Sat, 19 Oct 2024 11:54:32 -0400 Subject: [PATCH 19/67] Update execution info at end of planning before kicking off execution phase (#115127) The revised took time model bug fix #115017 introduced a new bug that allows a race condition between updating the execution info with "end of planning" timestamp and using that timestamp during execution. This one line fix reverses the order to ensure the planning phase execution update occurs before starting the ESQL query execution phase. 
--- .../java/org/elasticsearch/xpack/esql/session/EsqlSession.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 788b2827d7c8e..ccd167942340c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -162,8 +162,8 @@ public void executeOptimizedPlan( ) { LogicalPlan firstPhase = Phased.extractFirstPhase(optimizedPlan); if (firstPhase == null) { - runPhase.accept(logicalPlanToPhysicalPlan(optimizedPlan, request), listener); updateExecutionInfoAtEndOfPlanning(executionInfo); + runPhase.accept(logicalPlanToPhysicalPlan(optimizedPlan, request), listener); } else { executePhased(new ArrayList<>(), optimizedPlan, request, executionInfo, firstPhase, runPhase, listener); } From b4a58175b7f54a858ede46b81ab6f5d80c1be97c Mon Sep 17 00:00:00 2001 From: Ed Savage Date: Mon, 21 Oct 2024 09:10:14 +1300 Subject: [PATCH 20/67] [ML] Unmute MLModelDeploymentFullClusterRestartIT.testDeploymentSurvivesRestart (#115060) After several hundreds of iterations of ``` ./gradlew ":x-pack:qa:full-cluster-restart:v8.0.1#bwcTest" -Dtests.class="org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT" -Dtests.method="testDeploymentSurvivesRestart" -Dtests.seed=A7BE2CA36E251E1E -Dtests.bwc=true -Dtests.locale=af-ZA -Dtests.timezone=Antarctica/South_Pole -Druntime.java=22 ``` No failures were observed. Given the location of the failure mentioned in #112980 it was likely due to a timeout on a busy CI machine. Just in case I've bumped the timeout in the busy wait loop. Also removed the now unneeded `@UpdateForV9` annotation in passing. 
Closes #112980 --- muted-tests.yml | 3 --- .../restart/MLModelDeploymentFullClusterRestartIT.java | 7 +------ 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4b69eacba7b1a..b7323bfc1de18 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -214,9 +214,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/113722 - class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanosTests issue: https://github.com/elastic/elasticsearch/issues/113661 -- class: org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT - method: testDeploymentSurvivesRestart {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/112980 - class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT method: testNonGzippedDatabase issue: https://github.com/elastic/elasticsearch/issues/113821 diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index 3e57faea848bf..dc9afb1bec237 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -18,8 +18,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Strings; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; @@ -93,9 +91,6 @@ protected Settings restClientSettings() { 
} public void testDeploymentSurvivesRestart() throws Exception { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // condition will always be true from v8, can be removed - var originalClusterSupportsNlpModels = oldClusterHasFeature(RestTestLegacyFeatures.ML_NLP_SUPPORTED); - assumeTrue("NLP model deployments added in 8.0", originalClusterSupportsNlpModels); String modelId = "trained-model-full-cluster-restart"; @@ -139,7 +134,7 @@ private void waitForDeploymentStarted(String modelId) throws Exception { equalTo("fully_allocated") ); assertThat(stat.toString(), XContentMapValues.extractValue("deployment_stats.state", stat), equalTo("started")); - }, 90, TimeUnit.SECONDS); + }, 120, TimeUnit.SECONDS); } private void assertInfer(String modelId) throws IOException { From 22b4d814d19f460b82391975775e8ca0e487d86a Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 21 Oct 2024 11:31:24 +1100 Subject: [PATCH 21/67] [Test] Use stream.next instead of setAutoRead in test (#115063) For a more realistic simulation. 
--- .../netty4/Netty4IncrementalRequestHandlingIT.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 26d31b941f356..b5c272f41a1d5 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -175,12 +175,16 @@ public void testClientConnectionCloseMidStream() throws Exception { var handler = ctx.awaitRestChannelAccepted(opaqueId); assertBusy(() -> assertNotNull(handler.stream.buf())); - // enable auto-read to receive channel close event - handler.stream.channel().config().setAutoRead(true); assertFalse(handler.streamClosed); - // terminate connection and wait resources are released + // terminate client connection ctx.clientChannel.close(); + // read the first half of the request + handler.stream.next(); + // attempt to read more data and it should notice channel being closed eventually + handler.stream.next(); + + // wait for resources to be released assertBusy(() -> { assertNull(handler.stream.buf()); assertTrue(handler.streamClosed); From 7d4f75ab802a9e270970763329b05e57d1044518 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 21 Oct 2024 09:36:14 +0200 Subject: [PATCH 22/67] ES|QL: add metrics for functions (#114620) --- docs/changelog/114620.yaml | 5 + docs/reference/rest-api/usage.asciidoc | 3 +- .../xpack/esql/EsqlTestUtils.java | 3 +- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/analysis/Verifier.java | 4 + .../xpack/esql/execution/PlanExecutor.java | 2 +- .../xpack/esql/stats/Metrics.java | 44 ++++++++- 
.../LocalPhysicalPlanOptimizerTests.java | 2 +- .../esql/planner/QueryTranslatorTests.java | 2 +- .../esql/stats/VerifierMetricsTests.java | 95 ++++++++++++++++++- .../rest-api-spec/test/esql/60_usage.yml | 15 ++- 11 files changed, 171 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/114620.yaml diff --git a/docs/changelog/114620.yaml b/docs/changelog/114620.yaml new file mode 100644 index 0000000000000..92498db92061f --- /dev/null +++ b/docs/changelog/114620.yaml @@ -0,0 +1,5 @@ +pr: 114620 +summary: "ES|QL: add metrics for functions" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 5fd2304ff9378..27cc1723265c9 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -38,9 +38,10 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] ------------------------------------------------------------ GET /_xpack/usage ------------------------------------------------------------ -// TEST[s/usage/usage?filter_path=-watcher.execution.actions.index*\,-watcher.execution.actions.logging*,-watcher.execution.actions.email*/] +// TEST[s/usage/usage?filter_path=-watcher.execution.actions.index*\,-watcher.execution.actions.logging*,-watcher.execution.actions.email*,-esql.functions*/] // This response filter removes watcher logging results if they are included // to avoid errors in the CI builds. +// Same for ES|QL functions, that is a long list and quickly evolving. 
[source,console-result] ------------------------------------------------------------ diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 693b6fa8bd670..f5bcb37c63e84 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -46,6 +46,7 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.DateUtils; import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; @@ -260,7 +261,7 @@ public boolean isIndexed(String field) { public static final Configuration TEST_CFG = configuration(new QueryPragmas(Settings.EMPTY)); - public static final Verifier TEST_VERIFIER = new Verifier(new Metrics()); + public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry())); private EsqlTestUtils() {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index b31fc005a0a5d..adfba4c487618 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -390,7 +390,12 @@ public enum Cap { /** * Fix for https://github.com/elastic/elasticsearch/issues/114714 */ - FIX_STATS_BY_FOLDABLE_EXPRESSION; + 
FIX_STATS_BY_FOLDABLE_EXPRESSION, + + /** + * Adding stats for functions (stack telemetry) + */ + FUNCTION_STATS; private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index ef39220d7ffcc..e2717cd9af0d1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -58,6 +58,7 @@ import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; @@ -480,6 +481,9 @@ private void gatherMetrics(LogicalPlan plan, BitSet b) { for (int i = b.nextSetBit(0); i >= 0; i = b.nextSetBit(i + 1)) { metrics.inc(FeatureMetric.values()[i]); } + Set> functions = new HashSet<>(); + plan.forEachExpressionDown(Function.class, p -> functions.add(p.getClass())); + functions.forEach(f -> metrics.incFunctionMetric(f)); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 7d8e0cd736445..ee8822889bedb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -48,7 +48,7 @@ public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry) { this.preAnalyzer = new PreAnalyzer(); this.functionRegistry = new EsqlFunctionRegistry(); this.mapper = new Mapper(functionRegistry); - this.metrics = new Metrics(); + this.metrics = new Metrics(functionRegistry); this.verifier = new Verifier(metrics); this.planningMetricsManager = new 
PlanningMetricsManager(meterRegistry); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java index 6c5d9faf18ac4..092fecb3142db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java @@ -10,8 +10,11 @@ import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; @@ -36,10 +39,17 @@ public String toString() { private final Map> opsByTypeMetrics; // map that holds one counter per esql query "feature" (eval, sort, limit, where....) 
private final Map featuresMetrics; + private final Map functionMetrics; protected static String QPREFIX = "queries."; protected static String FPREFIX = "features."; + protected static String FUNC_PREFIX = "functions."; - public Metrics() { + private final EsqlFunctionRegistry functionRegistry; + private final Map, String> classToFunctionName; + + public Metrics(EsqlFunctionRegistry functionRegistry) { + this.functionRegistry = functionRegistry.snapshotRegistry(); + this.classToFunctionName = initClassToFunctionType(); Map> qMap = new LinkedHashMap<>(); for (QueryMetric metric : QueryMetric.values()) { Map metricsMap = Maps.newLinkedHashMapWithExpectedSize(OperationType.values().length); @@ -56,6 +66,26 @@ public Metrics() { fMap.put(featureMetric, new CounterMetric()); } featuresMetrics = Collections.unmodifiableMap(fMap); + + functionMetrics = initFunctionMetrics(); + } + + private Map initFunctionMetrics() { + Map result = new LinkedHashMap<>(); + for (var entry : classToFunctionName.entrySet()) { + result.put(entry.getValue(), new CounterMetric()); + } + return Collections.unmodifiableMap(result); + } + + private Map, String> initClassToFunctionType() { + Map, String> tmp = new HashMap<>(); + for (FunctionDefinition func : functionRegistry.listFunctions()) { + if (tmp.containsKey(func.clazz()) == false) { + tmp.put(func.clazz(), func.name()); + } + } + return Collections.unmodifiableMap(tmp); } /** @@ -81,6 +111,13 @@ public void inc(FeatureMetric metric) { this.featuresMetrics.get(metric).inc(); } + public void incFunctionMetric(Class functionType) { + String functionName = classToFunctionName.get(functionType); + if (functionName != null) { + functionMetrics.get(functionName).inc(); + } + } + public Counters stats() { Counters counters = new Counters(); @@ -102,6 +139,11 @@ public Counters stats() { counters.inc(FPREFIX + entry.getKey().toString(), entry.getValue().count()); } + // function metrics + for (Entry entry : functionMetrics.entrySet()) { + 
counters.inc(FUNC_PREFIX + entry.getKey(), entry.getValue().count()); + } + return counters; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 8501dd6e478df..72060bccb520a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -143,7 +143,7 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes return new Analyzer( new AnalyzerContext(config, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), - new Verifier(new Metrics()) + new Verifier(new Metrics(new EsqlFunctionRegistry())) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index 760d8a327ad20..cf90cf96fe683 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -46,7 +46,7 @@ private static Analyzer makeAnalyzer(String mappingFileName) { return new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, new EnrichResolution()), - new Verifier(new Metrics()) + new Verifier(new Metrics(new EsqlFunctionRegistry())) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 203e5c3bd37ee..5e6588d2295f9 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -10,9 +10,14 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; import org.elasticsearch.xpack.esql.analysis.Verifier; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; @@ -32,6 +37,7 @@ import static org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; import static org.elasticsearch.xpack.esql.stats.Metrics.FPREFIX; +import static org.elasticsearch.xpack.esql.stats.Metrics.FUNC_PREFIX; public class VerifierMetricsTests extends ESTestCase { @@ -54,6 +60,8 @@ public void testDissectQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("concat", c)); } public void testEvalQuery() { @@ -73,6 +81,8 @@ public void testEvalQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("length", c)); } public void testGrokQuery() { @@ -92,6 +102,8 @@ public void testGrokQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("concat", c)); } public void testLimitQuery() { @@ -149,6 +161,8 @@ public void testStatsQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("max", c)); } public 
void testWhereQuery() { @@ -190,7 +204,7 @@ public void testTwoWhereQuery() { } public void testTwoQueriesExecuted() { - Metrics metrics = new Metrics(); + Metrics metrics = new Metrics(new EsqlFunctionRegistry()); Verifier verifier = new Verifier(metrics); esqlWithVerifier(""" from employees @@ -226,6 +240,64 @@ public void testTwoQueriesExecuted() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("length", c)); + assertEquals(1, function("concat", c)); + assertEquals(1, function("max", c)); + assertEquals(1, function("min", c)); + + assertEquals(0, function("sin", c)); + assertEquals(0, function("cos", c)); + } + + public void testMultipleFunctions() { + Metrics metrics = new Metrics(new EsqlFunctionRegistry()); + Verifier verifier = new Verifier(metrics); + esqlWithVerifier(""" + from employees + | where languages > 2 + | limit 5 + | eval name_len = length(first_name), surname_len = length(last_name) + | sort length(first_name) + | limit 3 + """, verifier); + + Counters c = metrics.stats(); + assertEquals(1, function("length", c)); + assertEquals(0, function("concat", c)); + + esqlWithVerifier(""" + from employees + | where languages > 2 + | sort first_name desc nulls first + | dissect concat(first_name, " ", last_name) "%{a} %{b}" + | grok concat(first_name, " ", last_name) "%{WORD:a} %{WORD:b}" + | eval name_len = length(first_name), surname_len = length(last_name) + | stats x = max(languages) + | sort x + | stats y = min(x) by x + """, verifier); + c = metrics.stats(); + + assertEquals(2, function("length", c)); + assertEquals(1, function("concat", c)); + assertEquals(1, function("max", c)); + assertEquals(1, function("min", c)); + + EsqlFunctionRegistry fr = new EsqlFunctionRegistry().snapshotRegistry(); + Map, String> functions = new HashMap<>(); + for (FunctionDefinition func : fr.listFunctions()) { + if (functions.containsKey(func.clazz()) == false) { + functions.put(func.clazz(), func.name()); 
+ } + } + for (String value : functions.values()) { + if (Set.of("length", "concat", "max", "min").contains(value) == false) { + assertEquals(0, function(value, c)); + } + } + Map map = (Map) c.toNestedMap().get("functions"); + assertEquals(functions.size(), map.size()); } public void testEnrich() { @@ -251,6 +323,8 @@ public void testEnrich() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("to_string", c)); } public void testMvExpand() { @@ -298,6 +372,8 @@ public void testShowInfo() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + + assertEquals(1, function("count", c)); } public void testRow() { @@ -336,6 +412,8 @@ public void testDropAndRename() { assertEquals(1L, drop(c)); assertEquals(0, keep(c)); assertEquals(1L, rename(c)); + + assertEquals(1, function("count", c)); } public void testKeep() { @@ -422,6 +500,19 @@ private long rename(Counters c) { return c.get(FPREFIX + RENAME); } + private long function(String function, Counters c) { + return c.get(FUNC_PREFIX + function); + } + + private void assertNullFunction(String function, Counters c) { + try { + c.get(FUNC_PREFIX + function); + fail(); + } catch (NullPointerException npe) { + + } + } + private Counters esql(String esql) { return esql(esql, null); } @@ -434,7 +525,7 @@ private Counters esql(String esql, Verifier v) { Verifier verifier = v; Metrics metrics = null; if (v == null) { - metrics = new Metrics(); + metrics = new Metrics(new EsqlFunctionRegistry()); verifier = new Verifier(metrics); } analyzer(verifier).analyze(parser.createStatement(esql)); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 8bbdb27a87d1a..e1fd9b0201a35 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -5,7 +5,7 @@ setup: - method: POST path: /_query parameters: [ method, path, parameters, capabilities ] - capabilities: [ no_meta ] + capabilities: [ function_stats ] reason: "META command removed which changes the count of the data returned" test_runner_features: [capabilities] @@ -51,11 +51,16 @@ setup: - set: {esql.queries.kibana.failed: kibana_failed_counter} - set: {esql.queries._all.total: all_total_counter} - set: {esql.queries._all.failed: all_failed_counter} + - set: {esql.functions.max: functions_max} + - set: {esql.functions.min: functions_min} + - set: {esql.functions.cos: functions_cos} + - set: {esql.functions.to_long: functions_to_long} + - set: {esql.functions.coalesce: functions_coalesce} - do: esql.query: body: - query: 'from test | where data > 2 | sort count desc | limit 5 | stats m = max(data)' + query: 'from test | where data > 2 and to_long(data) > 2 | sort count desc | limit 5 | stats m = max(data)' - do: {xpack.usage: {}} - match: { esql.available: true } @@ -73,3 +78,9 @@ setup: - match: {esql.queries.kibana.failed: $kibana_failed_counter} - gt: {esql.queries._all.total: $all_total_counter} - match: {esql.queries._all.failed: $all_failed_counter} + - gt: {esql.functions.max: $functions_max} + - match: {esql.functions.min: $functions_min} + - match: {esql.functions.cos: $functions_cos} + - gt: {esql.functions.to_long: $functions_to_long} + - match: {esql.functions.coalesce: $functions_coalesce} + - length: {esql.functions: 117} From ecf4af1e8895617f735195fb44b7f5c647c02afe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 21 Oct 2024 09:41:55 +0200 Subject: [PATCH 23/67] [DOCS] Documents watsonx service of the Inference API (#115088) Co-authored-by: Saikat Sarkar <132922331+saikatsarkar056@users.noreply.github.com> --- .../inference/delete-inference.asciidoc | 9 +- .../inference/get-inference.asciidoc | 9 +- 
.../inference/inference-apis.asciidoc | 1 + .../inference/post-inference.asciidoc | 9 +- .../inference/put-inference.asciidoc | 10 +- .../inference/service-watsonx-ai.asciidoc | 115 ++++++++++++++++++ .../inference/update-inference.asciidoc | 2 +- 7 files changed, 129 insertions(+), 26 deletions(-) create mode 100644 docs/reference/inference/service-watsonx-ai.asciidoc diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index bee39bf9b9851..4fc4beaca6d8e 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -6,12 +6,9 @@ experimental[] Deletes an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or -Hugging Face. For built-in models and models uploaded through Eland, the {infer} -APIs offer an alternative way to use and manage trained models. However, if you -do not plan to use the {infer} APIs to use these models or if you want to use -non-NLP models, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
[discrete] diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index c3fe841603bcc..d991729fe77c9 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -6,12 +6,9 @@ experimental[] Retrieves {infer} endpoint information. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or -Hugging Face. For built-in models and models uploaded through Eland, the {infer} -APIs offer an alternative way to use and manage trained models. However, if you -do not plan to use the {infer} APIs to use these models or if you want to use -non-NLP models, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
[discrete] diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 88421e4f64cfd..e756831075027 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -54,3 +54,4 @@ include::service-google-vertex-ai.asciidoc[] include::service-hugging-face.asciidoc[] include::service-mistral.asciidoc[] include::service-openai.asciidoc[] +include::service-watsonx-ai.asciidoc[] diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 52131c0b10776..ce51abaff07f8 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -6,12 +6,9 @@ experimental[] Performs an inference task on an input text by using an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or -Hugging Face. For built-in models and models uploaded through Eland, the {infer} -APIs offer an alternative way to use and manage trained models. However, if you -do not plan to use the {infer} APIs to use these models or if you want to use -non-NLP models, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. 
[discrete] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 96e127e741d56..6d6b61ffea771 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -8,13 +8,8 @@ Creates an {infer} endpoint to perform an {infer} task. [IMPORTANT] ==== -* The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, -Azure OpenAI, Google AI Studio, Google Vertex AI, Anthropic or Hugging Face. -* For built-in models and models uploaded through Eland, the {infer} APIs offer an -alternative way to use and manage trained models. However, if you do not plan to -use the {infer} APIs to use these models or if you want to use non-NLP models, -use the <>. +* The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. +* For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. ==== @@ -71,6 +66,7 @@ Click the links to review the configuration details of the services: * <> (`text_embedding`) * <> (`text_embedding`) * <> (`completion`, `text_embedding`) +* <> (`text_embedding`) The {es} and ELSER services run on a {ml} node in your {es} cluster. The rest of the services connect to external providers. 
\ No newline at end of file diff --git a/docs/reference/inference/service-watsonx-ai.asciidoc b/docs/reference/inference/service-watsonx-ai.asciidoc new file mode 100644 index 0000000000000..597afc27fd0cf --- /dev/null +++ b/docs/reference/inference/service-watsonx-ai.asciidoc @@ -0,0 +1,115 @@ +[[infer-service-watsonx-ai]] +=== Watsonx {infer} service + +Creates an {infer} endpoint to perform an {infer} task with the `watsonxai` service. + +You need an https://cloud.ibm.com/docs/databases-for-elasticsearch?topic=databases-for-elasticsearch-provisioning&interface=api[IBM Cloud® Databases for Elasticsearch deployment] to use the `watsonxai` {infer} service. +You can provision one through the https://cloud.ibm.com/databases/databases-for-elasticsearch/create[IBM catalog], the https://cloud.ibm.com/docs/databases-cli-plugin?topic=databases-cli-plugin-cdb-reference[Cloud Databases CLI plug-in], the https://cloud.ibm.com/apidocs/cloud-databases-api[Cloud Databases API], or https://registry.terraform.io/providers/IBM-Cloud/ibm/latest/docs/resources/database[Terraform]. + + +[discrete] +[[infer-service-watsonx-ai-api-request]] +==== {api-request-title} + +`PUT /_inference/<task_type>/<inference_id>` + +[discrete] +[[infer-service-watsonx-ai-api-path-params]] +==== {api-path-parms-title} + +`<inference_id>`:: +(Required, string) +include::inference-shared.asciidoc[tag=inference-id] + +`<task_type>`:: +(Required, string) +include::inference-shared.asciidoc[tag=task-type] ++ +-- +Available task types: + +* `text_embedding`. +-- + +[discrete] +[[infer-service-watsonx-ai-api-request-body]] +==== {api-request-body-title} + +`service`:: +(Required, string) +The type of service supported for the specified task type. In this case, +`watsonxai`. + +`service_settings`:: +(Required, object) +include::inference-shared.asciidoc[tag=service-settings] ++ +-- +These settings are specific to the `watsonxai` service. +-- + +`api_key`::: +(Required, string) +A valid API key of your Watsonx account.
+You can find your Watsonx API keys or you can create a new one https://cloud.ibm.com/iam/apikeys[on the API keys page]. ++ +-- +include::inference-shared.asciidoc[tag=api-key-admonition] +-- + +`api_version`::: +(Required, string) +Version parameter that takes a version date in the format of `YYYY-MM-DD`. +For the active version data parameters, refer to the https://cloud.ibm.com/apidocs/watsonx-ai#active-version-dates[documentation]. + +`model_id`::: +(Required, string) +The name of the model to use for the {infer} task. +Refer to the IBM Embedding Models section in the https://www.ibm.com/products/watsonx-ai/foundation-models[Watsonx documentation] for the list of available text embedding models. + +`url`::: +(Required, string) +The URL endpoint to use for the requests. + +`project_id`::: +(Required, string) +The name of the project to use for the {infer} task. + +`rate_limit`::: +(Optional, object) +By default, the `watsonxai` service sets the number of requests allowed per minute to `120`. +This helps to minimize the number of rate limit errors returned from Watsonx. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +-- +include::inference-shared.asciidoc[tag=request-per-minute-example] +-- + + +[discrete] +[[inference-example-watsonx-ai]] +==== Watsonx AI service example + +The following example shows how to create an {infer} endpoint called `watsonx-embeddings` to perform a `text_embedding` task type. + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/watsonx-embeddings +{ + "service": "watsonxai", + "service_settings": { + "api_key": "", <1> + "url": "", <2> + "model_id": "ibm/slate-30m-english-rtrvr", + "project_id": "", <3> + "api_version": "2024-03-14" <4> + } +} + +------------------------------------------------------------ +// TEST[skip:TBD] +<1> A valid Watsonx API key. 
+You can find it on the https://cloud.ibm.com/iam/apikeys[API keys page of your account]. +<2> The {infer} endpoint URL you created on Watsonx. +<3> The ID of your IBM Cloud project. +<4> A valid API version parameter. You can find the active version data parameters https://cloud.ibm.com/apidocs/watsonx-ai#active-version-dates[here]. \ No newline at end of file diff --git a/docs/reference/inference/update-inference.asciidoc b/docs/reference/inference/update-inference.asciidoc index 166b002ea45f5..01a99d7f53062 100644 --- a/docs/reference/inference/update-inference.asciidoc +++ b/docs/reference/inference/update-inference.asciidoc @@ -6,7 +6,7 @@ experimental[] Updates an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or Hugging Face. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <<ml-df-trained-models-apis>>.
From 6be3036c01caffb8f82711e32f15ffbc6b8fda2d Mon Sep 17 00:00:00 2001 From: mccheah Date: Mon, 21 Oct 2024 01:28:44 -0700 Subject: [PATCH 24/67] Do not exclude empty arrays or empty objects in source filtering with Jackson streaming (#112250) --- docs/changelog/112250.yaml | 5 +++ .../filtering/FilterPathBasedFilter.java | 35 +++++++++++++++ .../search/lookup/SourceFilterTests.java | 44 +++++++++++++++++++ 3 files changed, 84 insertions(+) create mode 100644 docs/changelog/112250.yaml diff --git a/docs/changelog/112250.yaml b/docs/changelog/112250.yaml new file mode 100644 index 0000000000000..edbb5667d4b9d --- /dev/null +++ b/docs/changelog/112250.yaml @@ -0,0 +1,5 @@ +pr: 112250 +summary: Do not exclude empty arrays or empty objects in source filtering +area: Search +type: bug +issues: [109668] diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java index e0b5875c6c108..4562afa8af693 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/filtering/FilterPathBasedFilter.java @@ -96,6 +96,41 @@ public TokenFilter includeProperty(String name) { return filter; } + /** + * This is overridden in order to keep empty arrays in nested exclusions - see #109668. + *

+ * If we are excluding contents, we only want to exclude based on property name - but empty arrays in themselves do not have a property + * name. If the empty array were to be excluded, it should be done by excluding the parent. + *

+ * Note though that the expected behavior seems to be ambiguous if contentsFiltered is true - that is, that the filter has pruned all + * the contents of a given array, such that we are left with the empty array. The behavior below drops that array, for at the time of + * writing, not doing so would cause assertions in JsonXContentFilteringTests to fail, which expect this behavior. Yet it is not obvious + * if dropping the empty array in this case is correct. For example, one could expect this sort of behavior: + *

+ * From the user's perspective, this could reasonably yield either of:
+ * <ol>
+ *     <li>{@code { "myArray": []}}</li>
+ *     <li>Removing {@code myArray} entirely.</li>
+ * </ol>
+ */ + @Override + public boolean includeEmptyArray(boolean contentsFiltered) { + return inclusive == false && contentsFiltered == false; + } + + /** + * This is overridden in order to keep empty objects in nested exclusions - see #109668. + *

+ * The same logic applies to this as to {@link #includeEmptyArray(boolean)}, only for nested objects instead of nested arrays. + */ + @Override + public boolean includeEmptyObject(boolean contentsFiltered) { + return inclusive == false && contentsFiltered == false; + } + @Override protected boolean _includeScalar() { return inclusive == false; diff --git a/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java b/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java index 370584e3f29f5..bddfd53b2b120 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/SourceFilterTests.java @@ -112,4 +112,48 @@ public Source filter(SourceFilter sourceFilter) { } + // Verification for issue #109668 + public void testIncludeParentAndExcludeChildEmptyArray() { + Source fromMap = Source.fromMap(Map.of("myArray", List.of()), XContentType.JSON); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredMap.source(), Map.of("myArray", List.of())); + Source fromBytes = Source.fromBytes(new BytesArray("{\"myArray\": []}"), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredBytes.source(), Map.of("myArray", List.of())); + } + + public void testIncludeParentAndExcludeChildEmptyObject() { + Source fromMap = Source.fromMap(Map.of("myObject", Map.of()), XContentType.JSON); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myObject" }, new String[] { "myObject.myField" })); + assertEquals(filteredMap.source(), Map.of("myObject", Map.of())); + Source fromBytes = Source.fromBytes(new BytesArray("{\"myObject\": {}}"), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myObject" }, new String[] { 
"myObject.myField" })); + assertEquals(filteredBytes.source(), Map.of("myObject", Map.of())); + } + + public void testIncludeParentAndExcludeChildSubFieldsArrays() { + Source fromMap = Source.fromMap( + Map.of("myArray", List.of(Map.of("myField", "myValue", "other", "otherValue"))), + XContentType.JSON + ); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredMap.source(), Map.of("myArray", List.of(Map.of("other", "otherValue")))); + Source fromBytes = Source.fromBytes(new BytesArray(""" + { "myArray": [ { "myField": "myValue", "other": "otherValue" } ] }"""), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myArray" }, new String[] { "myArray.myField" })); + assertEquals(filteredBytes.source(), Map.of("myArray", List.of(Map.of("other", "otherValue")))); + } + + public void testIncludeParentAndExcludeChildSubFieldsObjects() { + Source fromMap = Source.fromMap( + Map.of("myObject", Map.of("myField", "myValue", "other", "otherValue")), + XContentType.JSON + ); + Source filteredMap = fromMap.filter(new SourceFilter(new String[] { "myObject" }, new String[] { "myObject.myField" })); + assertEquals(filteredMap.source(), Map.of("myObject", Map.of("other", "otherValue"))); + Source fromBytes = Source.fromBytes(new BytesArray(""" + { "myObject": { "myField": "myValue", "other": "otherValue" } }"""), XContentType.JSON); + Source filteredBytes = fromBytes.filter(new SourceFilter(new String[] { "myObject" }, new String[] { "myObject.myField" })); + assertEquals(filteredBytes.source(), Map.of("myObject", Map.of("other", "otherValue"))); + } } From 5645240976cd88ff9f1a30240c0923e9788f3f4c Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 21 Oct 2024 11:32:07 +0300 Subject: [PATCH 25/67] SyntheticSourceIndexSettingsProvider restores stored source (#114978) * 
SyntheticSourceIndexSettingsProvider restores stored source * remove asserts * add and fix tests * fix test * more tests * fix assert * remove assert --- .../DisabledSecurityDataStreamTestCase.java | 1 + .../xpack/downsample/DownsampleRestIT.java | 2 +- .../xpack/logsdb/LogsdbRestIT.java | 34 ++++++++ .../xpack/logsdb/LogsdbRestIT.java | 3 + .../SyntheticSourceIndexSettingsProvider.java | 5 +- ...heticSourceIndexSettingsProviderTests.java | 86 ++++++++++++++++++- 6 files changed, 128 insertions(+), 3 deletions(-) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java index 9839f9abb080e..619bfd74d853c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java @@ -28,6 +28,7 @@ public abstract class DisabledSecurityDataStreamTestCase extends ESRestTestCase public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .feature(FeatureFlag.FAILURE_STORE_ENABLED) + .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "false") .setting("xpack.watcher.enabled", "false") .build(); diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java index 504326f1bd4b1..6794bc47fa3cd 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/DownsampleRestIT.java @@ -20,7 +20,7 @@ public class 
DownsampleRestIT extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "basic") + .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "false") .build(); diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index 813a181045f2e..edecf4eb9669e 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.logsdb; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; @@ -72,4 +73,37 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { } } + public void testLogsdbIndexGetsStoredSource() throws IOException { + final String index = "test-index"; + createIndex(index, Settings.builder().put("index.mode", "logsdb").build()); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideSyntheticSourceModeInMapping() throws IOException { + final String index = "test-index"; + String mapping = """ + { + "_source": { + "mode": "synthetic" + } + } + """; + createIndex(index, Settings.builder().put("index.mode", "logsdb").build(), mapping); + 
var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbNoOverrideSyntheticSourceSetting() throws IOException { + final String index = "test-index"; + createIndex( + index, + Settings.builder().put("index.mode", "logsdb").put("index.mapping.source.mode", SourceFieldMapper.Mode.SYNTHETIC).build() + ); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.SYNTHETIC.toString(), settings.get("index.mapping.source.mode")); + } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index b2d2978a254df..16759c3292f7a 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -75,6 +75,9 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { Map feature = features.stream().filter(map -> "mappings".equals(map.get("family"))).findFirst().get(); assertThat(feature.get("name"), equalTo("synthetic-source")); assertThat(feature.get("license_level"), equalTo("enterprise")); + + var settings = (Map) ((Map) getIndexSettings("test-index").get("test-index")).get("settings"); + assertNull(settings.get("index.mapping.source.mode")); // Default, no downgrading. 
} } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index a190ff72de8df..f60c941c75a7c 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.SourceFieldMapper; import java.io.IOException; import java.time.Instant; @@ -62,7 +63,9 @@ public Settings getAdditionalIndexSettings( if (newIndexHasSyntheticSourceUsage(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings, combinedTemplateMappings) && syntheticSourceLicenseService.fallbackToStoredSource(isTemplateValidation)) { LOGGER.debug("creation of index [{}] with synthetic source without it being allowed", indexName); - // TODO: handle falling back to stored source + return Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString()) + .build(); } return Settings.EMPTY; } diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 738487b9365a7..362b387726105 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -8,22 +8,42 @@ package org.elasticsearch.xpack.logsdb; import 
org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.MapperTestUtils; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.List; +import static org.elasticsearch.common.settings.Settings.builder; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class SyntheticSourceIndexSettingsProviderTests extends ESTestCase { + private SyntheticSourceLicenseService syntheticSourceLicenseService; private SyntheticSourceIndexSettingsProvider provider; @Before public void setup() { - SyntheticSourceLicenseService syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(any())).thenReturn(true); + var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + licenseService.setLicenseState(licenseState); + syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + syntheticSourceLicenseService.setLicenseState(licenseState); + provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()) @@ -226,4 +246,68 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep } } + public void 
testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws IOException { + String dataStreamName = "logs-app1"; + Metadata.Builder mb = Metadata.builder( + DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(Tuple.tuple(dataStreamName, 1)), + List.of(), + Instant.now().toEpochMilli(), + builder().build(), + 1 + ).getMetadata() + ); + Metadata metadata = mb.build(); + + Settings settings = builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + + Settings result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + null, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(0)); + + syntheticSourceLicenseService.setSyntheticSourceFallback(true); + result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + null, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(1)); + assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + + result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + IndexMode.TIME_SERIES, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(1)); + assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + + result = provider.getAdditionalIndexSettings( + DataStream.getDefaultBackingIndexName(dataStreamName, 2), + dataStreamName, + IndexMode.LOGSDB, + metadata, + Instant.ofEpochMilli(1L), + settings, + List.of() + ); + assertThat(result.size(), equalTo(1)); + assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + } } From 
78a43981b64d384ca5fb22ea0a94d327c05e5358 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Mon, 21 Oct 2024 11:18:26 +0200 Subject: [PATCH 26/67] Reprocess operator file settings on service start (#114295) Changes `FileSettingsService` to reprocess file settings on every restart or master node change, even if versions match between file and cluster-state metadata. If the file version is lower than the metadata version, processing is still skipped to avoid applying stale settings. This makes it easier for consumers of file settings to change their behavior w.r.t. file settings contents. For instance, an update of how role mappings are stored will automatically apply on the next restart, without the need to manually increment the file settings version to force reprocessing. Relates: ES-9628 --- docs/changelog/114295.yaml | 5 + .../FileSettingsRoleMappingUpgradeIT.java | 111 ++++++++++ .../service/FileSettingsServiceIT.java | 127 ++++++++++- .../file/AbstractFileWatchingService.java | 22 +- .../reservedstate/service/ErrorState.java | 19 +- .../service/FileSettingsService.java | 22 +- .../service/ReservedClusterStateService.java | 46 +++- .../service/ReservedStateErrorTask.java | 6 +- .../service/ReservedStateUpdateTask.java | 55 +++-- .../service/ReservedStateVersionCheck.java | 40 ++++ .../service/FileSettingsServiceTests.java | 68 +++++- .../ReservedClusterStateServiceTests.java | 120 +++++++++-- .../service/ReservedStateUpdateTaskTests.java | 10 +- .../ReservedLifecycleStateServiceTests.java | 9 +- .../RoleMappingFileSettingsIT.java | 12 +- .../FileSettingsRoleMappingsRestartIT.java | 200 ++++++++++++++---- ...vedSnapshotLifecycleStateServiceTests.java | 5 +- 17 files changed, 762 insertions(+), 115 deletions(-) create mode 100644 docs/changelog/114295.yaml create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java create mode 100644 
server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java diff --git a/docs/changelog/114295.yaml b/docs/changelog/114295.yaml new file mode 100644 index 0000000000000..2acdc293a206c --- /dev/null +++ b/docs/changelog/114295.yaml @@ -0,0 +1,5 @@ +pr: 114295 +summary: "Reprocess operator file settings when settings service starts, due to node restart or master node change" +area: Infra/Settings +type: enhancement +issues: [ ] diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java new file mode 100644 index 0000000000000..3275f3e0e136f --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.XContentTestUtils; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; + +public class FileSettingsRoleMappingUpgradeIT extends ParameterizedRollingUpgradeTestCase { + + private static final String settingsJSON = """ + { + "metadata": { + "version": "1", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } } + } + } + } + }"""; + + private static final TemporaryFolder repoDirectory = new TemporaryFolder(); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(NODE_NUM) + .setting("path.repo", new Supplier<>() { + @Override + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + public String get() { + return repoDirectory.getRoot().getPath(); + } + }) + .setting("xpack.security.enabled", "true") + // workaround to avoid having to set up clients and authorization headers + .setting("xpack.security.authc.anonymous.roles", "superuser") + .configFile("operator/settings.json", 
Resource.fromString(settingsJSON)) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + public FileSettingsRoleMappingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + + @Before + public void checkVersions() { + assumeTrue( + "Only relevant when upgrading from a version before role mappings were stored in cluster state", + oldClusterHasFeature("gte_v8.4.0") && oldClusterHasFeature("gte_v8.15.0") == false + ); + } + + public void testRoleMappingsAppliedOnUpgrade() throws IOException { + if (isOldCluster()) { + Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata"); + List roleMappings = new XContentTestUtils.JsonMapView(entityAsMap(client().performRequest(clusterStateRequest))).get( + "metadata.role_mappings.role_mappings" + ); + assertThat(roleMappings, is(nullValue())); + } else if (isUpgradedCluster()) { + // the nodes have all been upgraded. 
Check they re-processed the role mappings in the settings file on + // upgrade + Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata"); + List roleMappings = new XContentTestUtils.JsonMapView(entityAsMap(client().performRequest(clusterStateRequest))).get( + "metadata.role_mappings.role_mappings" + ); + assertThat(roleMappings, is(not(nullValue()))); + assertThat(roleMappings.size(), equalTo(1)); + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java index c618e354802a7..f9122ccfb4a3e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -40,6 +41,7 @@ import static org.elasticsearch.test.NodeRoles.dataOnlyNode; import static org.elasticsearch.test.NodeRoles.masterNode; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -50,7 +52,12 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class FileSettingsServiceIT extends ESIntegTestCase { - private static final AtomicLong versionCounter = new AtomicLong(1); + private final AtomicLong versionCounter = new AtomicLong(1); + + @Before + public void resetVersionCounter() { + versionCounter.set(1); + } private static final 
String testJSON = """ { @@ -102,6 +109,19 @@ public class FileSettingsServiceIT extends ESIntegTestCase { } }"""; + private static final String testOtherErrorJSON = """ + { + "metadata": { + "version": "%s", + "compatibility": "8.4.0" + }, + "state": { + "bad_cluster_settings": { + "search.allow_expensive_queries": "false" + } + } + }"""; + private void assertMasterNode(Client client, String node) { assertThat( client.admin().cluster().prepareState(TEST_REQUEST_TIMEOUT).get().getState().nodes().getMasterNode().getName(), @@ -109,8 +129,9 @@ private void assertMasterNode(Client client, String node) { ); } - public static void writeJSONFile(String node, String json, AtomicLong versionCounter, Logger logger) throws Exception { - long version = versionCounter.incrementAndGet(); + public static void writeJSONFile(String node, String json, AtomicLong versionCounter, Logger logger, boolean incrementVersion) + throws Exception { + long version = incrementVersion ? versionCounter.incrementAndGet() : versionCounter.get(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -124,6 +145,15 @@ public static void writeJSONFile(String node, String json, AtomicLong versionCou logger.info("--> After writing new settings file: [{}]", settingsFileContent); } + public static void writeJSONFile(String node, String json, AtomicLong versionCounter, Logger logger) throws Exception { + writeJSONFile(node, json, versionCounter, logger, true); + } + + public static void writeJSONFileWithoutVersionIncrement(String node, String json, AtomicLong versionCounter, Logger logger) + throws Exception { + writeJSONFile(node, json, versionCounter, logger, false); + } + private Tuple setupCleanupClusterStateListener(String node) { ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); @@ -171,7 +201,10 @@ public void clusterChanged(ClusterChangedEvent event) { private void 
assertClusterStateSaveOK(CountDownLatch savedClusterState, AtomicLong metadataVersion, String expectedBytesPerSec) throws Exception { assertTrue(savedClusterState.await(20, TimeUnit.SECONDS)); + assertExpectedRecoveryBytesSettingAndVersion(metadataVersion, expectedBytesPerSec); + } + private static void assertExpectedRecoveryBytesSettingAndVersion(AtomicLong metadataVersion, String expectedBytesPerSec) { final ClusterStateResponse clusterStateResponse = clusterAdmin().state( new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(metadataVersion.get()) ).actionGet(); @@ -337,6 +370,77 @@ public void testErrorSaved() throws Exception { assertClusterStateNotSaved(savedClusterState.v1(), savedClusterState.v2()); } + public void testErrorCanRecoverOnRestart() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + logger.info("--> start data node / non master node"); + String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + FileSettingsService dataFileSettingsService = internalCluster().getInstance(FileSettingsService.class, dataNode); + + assertFalse(dataFileSettingsService.watching()); + + logger.info("--> start master node"); + final String masterNode = internalCluster().startMasterOnlyNode( + Settings.builder().put(INITIAL_STATE_TIMEOUT_SETTING.getKey(), "0s").build() + ); + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + var savedClusterState = setupClusterStateListenerForError(masterNode); + + FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + + assertTrue(masterFileSettingsService.watching()); + assertFalse(dataFileSettingsService.watching()); + + writeJSONFile(masterNode, testErrorJSON, versionCounter, logger); + AtomicLong metadataVersion = savedClusterState.v2(); + assertClusterStateNotSaved(savedClusterState.v1(), metadataVersion); + 
assertHasErrors(metadataVersion, "not_cluster_settings"); + + // write valid json without version increment to simulate ES being able to process settings after a restart (usually, this would be + // due to a code change) + writeJSONFileWithoutVersionIncrement(masterNode, testJSON, versionCounter, logger); + internalCluster().restartNode(masterNode); + ensureGreen(); + + // we don't know the exact metadata version to wait for so rely on an assertBusy instead + assertBusy(() -> assertExpectedRecoveryBytesSettingAndVersion(metadataVersion, "50mb")); + assertBusy(() -> assertNoErrors(metadataVersion)); + } + + public void testNewErrorOnRestartReprocessing() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + logger.info("--> start data node / non master node"); + String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + FileSettingsService dataFileSettingsService = internalCluster().getInstance(FileSettingsService.class, dataNode); + + assertFalse(dataFileSettingsService.watching()); + + logger.info("--> start master node"); + final String masterNode = internalCluster().startMasterOnlyNode( + Settings.builder().put(INITIAL_STATE_TIMEOUT_SETTING.getKey(), "0s").build() + ); + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + var savedClusterState = setupClusterStateListenerForError(masterNode); + + FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + + assertTrue(masterFileSettingsService.watching()); + assertFalse(dataFileSettingsService.watching()); + + writeJSONFile(masterNode, testErrorJSON, versionCounter, logger); + AtomicLong metadataVersion = savedClusterState.v2(); + assertClusterStateNotSaved(savedClusterState.v1(), metadataVersion); + assertHasErrors(metadataVersion, "not_cluster_settings"); + + // write json with new error without version increment to simulate ES failing to 
process settings after a restart for a new reason + // (usually, this would be due to a code change) + writeJSONFileWithoutVersionIncrement(masterNode, testOtherErrorJSON, versionCounter, logger); + assertHasErrors(metadataVersion, "not_cluster_settings"); + internalCluster().restartNode(masterNode); + ensureGreen(); + + assertBusy(() -> assertHasErrors(metadataVersion, "bad_cluster_settings")); + } + public void testSettingsAppliedOnMasterReElection() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); logger.info("--> start master node"); @@ -383,4 +487,21 @@ public void testSettingsAppliedOnMasterReElection() throws Exception { assertClusterStateSaveOK(savedClusterState.v1(), savedClusterState.v2(), "43mb"); } + private void assertHasErrors(AtomicLong waitForMetadataVersion, String expectedError) { + var errorMetadata = getErrorMetadata(waitForMetadataVersion); + assertThat(errorMetadata, is(notNullValue())); + assertThat(errorMetadata.errors(), containsInAnyOrder(containsString(expectedError))); + } + + private void assertNoErrors(AtomicLong waitForMetadataVersion) { + var errorMetadata = getErrorMetadata(waitForMetadataVersion); + assertThat(errorMetadata, is(nullValue())); + } + + private ReservedStateErrorMetadata getErrorMetadata(AtomicLong waitForMetadataVersion) { + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(waitForMetadataVersion.get()) + ).actionGet(); + return clusterStateResponse.getState().getMetadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE).errorMetadata(); + } } diff --git a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java index dcb28a17a9b49..a900722397edd 100644 --- a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java +++ 
b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java @@ -77,6 +77,15 @@ public AbstractFileWatchingService(Path watchedFile) { protected abstract void processInitialFileMissing() throws InterruptedException, ExecutionException, IOException; + /** + * Defaults to generic {@link #processFileChanges()} behavior. + * An implementation can override this to define different file handling when the file is processed during + * initial service start. + */ + protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { + processFileChanges(); + } + public final void addFileChangedListener(FileChangedListener listener) { eventListeners.add(listener); } @@ -174,7 +183,7 @@ protected final void watcherThread() { if (Files.exists(path)) { logger.debug("found initial operator settings file [{}], applying...", path); - processSettingsAndNotifyListeners(); + processSettingsOnServiceStartAndNotifyListeners(); } else { processInitialFileMissing(); // Notify everyone we don't have any initial file settings @@ -290,6 +299,17 @@ final WatchKey enableDirectoryWatcher(WatchKey previousKey, Path settingsDir) th } while (true); } + void processSettingsOnServiceStartAndNotifyListeners() throws InterruptedException { + try { + processFileOnServiceStart(); + for (var listener : eventListeners) { + listener.watchedFileChanged(); + } + } catch (IOException | ExecutionException e) { + logger.error(() -> "Error processing watched file: " + watchedFile(), e); + } + } + void processSettingsAndNotifyListeners() throws InterruptedException { try { processFileChanges(); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java index 1a58974985ba8..af0512b78cb7e 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java +++ 
b/server/src/main/java/org/elasticsearch/reservedstate/service/ErrorState.java @@ -15,9 +15,22 @@ import static org.elasticsearch.ExceptionsHelper.stackTrace; -record ErrorState(String namespace, Long version, List errors, ReservedStateErrorMetadata.ErrorKind errorKind) { - ErrorState(String namespace, Long version, Exception e, ReservedStateErrorMetadata.ErrorKind errorKind) { - this(namespace, version, List.of(stackTrace(e)), errorKind); +record ErrorState( + String namespace, + Long version, + ReservedStateVersionCheck versionCheck, + List errors, + ReservedStateErrorMetadata.ErrorKind errorKind +) { + + ErrorState( + String namespace, + Long version, + ReservedStateVersionCheck versionCheck, + Exception e, + ReservedStateErrorMetadata.ErrorKind errorKind + ) { + this(namespace, version, versionCheck, List.of(stackTrace(e)), errorKind); } public String toString() { diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index c29f83c780d39..811b59465ce76 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -27,6 +27,8 @@ import java.nio.file.Files; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION; +import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_VERSION_ONLY; import static org.elasticsearch.xcontent.XContentType.JSON; /** @@ -115,20 +117,34 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { */ @Override protected void processFileChanges() throws ExecutionException, InterruptedException, IOException { - PlainActionFuture completion = new PlainActionFuture<>(); logger.info("processing path [{}] for [{}]", watchedFile(), NAMESPACE); + 
processFileChanges(HIGHER_VERSION_ONLY); + } + + /** + * Read settings and pass them to {@link ReservedClusterStateService} for application. + * Settings will be reprocessed even if the cluster-state version equals that found in the settings file. + */ + @Override + protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { + logger.info("processing path [{}] for [{}] on service start", watchedFile(), NAMESPACE); + processFileChanges(HIGHER_OR_SAME_VERSION); + } + + private void processFileChanges(ReservedStateVersionCheck versionCheck) throws IOException, InterruptedException, ExecutionException { + PlainActionFuture completion = new PlainActionFuture<>(); try ( var fis = Files.newInputStream(watchedFile()); var bis = new BufferedInputStream(fis); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - stateService.process(NAMESPACE, parser, (e) -> completeProcessing(e, completion)); + stateService.process(NAMESPACE, parser, versionCheck, (e) -> completeProcessing(e, completion)); } completion.get(); } @Override - protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { + protected void processInitialFileMissing() throws ExecutionException, InterruptedException { PlainActionFuture completion = new PlainActionFuture<>(); logger.info("setting file [{}] not found, initializing [{}] as empty", watchedFile(), NAMESPACE); stateService.initEmpty(NAMESPACE, completion); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index 5571fcfb08544..0c5fa61b29cfe 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -110,7 +110,13 @@ ReservedStateChunk 
parse(String namespace, XContentParser parser) { try { return stateChunkParser.apply(parser, null); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); + ErrorState errorState = new ErrorState( + namespace, + EMPTY_VERSION, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + e, + ReservedStateErrorMetadata.ErrorKind.PARSING + ); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -123,16 +129,22 @@ ReservedStateChunk parse(String namespace, XContentParser parser) { * * @param namespace the namespace under which we'll store the reserved keys in the cluster state metadata * @param parser the XContentParser to process + * @param versionCheck determines if current and new versions of reserved state require processing or should be skipped * @param errorListener a consumer called with {@link IllegalStateException} if the content has errors and the * cluster state cannot be correctly applied, null if successful or state couldn't be applied because of incompatible version. 
*/ - public void process(String namespace, XContentParser parser, Consumer errorListener) { + public void process( + String namespace, + XContentParser parser, + ReservedStateVersionCheck versionCheck, + Consumer errorListener + ) { ReservedStateChunk stateChunk; try { stateChunk = parse(namespace, parser); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); + ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, versionCheck, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -142,7 +154,7 @@ public void process(String namespace, XContentParser parser, Consumer return; } - process(namespace, stateChunk, errorListener); + process(namespace, stateChunk, versionCheck, errorListener); } public void initEmpty(String namespace, ActionListener listener) { @@ -153,6 +165,7 @@ public void initEmpty(String namespace, ActionListener lis new ReservedStateUpdateTask( namespace, emptyState, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), // error state should not be possible since there is no metadata being parsed or processed @@ -172,9 +185,14 @@ public void initEmpty(String namespace, ActionListener lis * @param errorListener a consumer called with {@link IllegalStateException} if the content has errors and the * cluster state cannot be correctly applied, null if successful or the state failed to apply because of incompatible version. 
*/ - public void process(String namespace, ReservedStateChunk reservedStateChunk, Consumer errorListener) { + public void process( + String namespace, + ReservedStateChunk reservedStateChunk, + ReservedStateVersionCheck versionCheck, + Consumer errorListener + ) { Map reservedState = reservedStateChunk.state(); - final ReservedStateVersion reservedStateVersion = reservedStateChunk.metadata(); + ReservedStateVersion reservedStateVersion = reservedStateChunk.metadata(); LinkedHashSet orderedHandlers; try { @@ -183,6 +201,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con ErrorState errorState = new ErrorState( namespace, reservedStateVersion.version(), + versionCheck, e, ReservedStateErrorMetadata.ErrorKind.PARSING ); @@ -201,7 +220,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con // We check if we should exit early on the state version from clusterService. The ReservedStateUpdateTask // will check again with the most current state version if this continues. - if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion) == false) { + if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion, versionCheck) == false) { errorListener.accept(null); return; } @@ -209,7 +228,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con // We trial run all handler validations to ensure that we can process all of the cluster state error free. 
var trialRunErrors = trialRun(namespace, state, reservedStateChunk, orderedHandlers); // this is not using the modified trial state above, but that doesn't matter, we're just setting errors here - var error = checkAndReportError(namespace, trialRunErrors, reservedStateVersion); + var error = checkAndReportError(namespace, trialRunErrors, reservedStateVersion, versionCheck); if (error != null) { errorListener.accept(error); @@ -220,6 +239,7 @@ public void process(String namespace, ReservedStateChunk reservedStateChunk, Con new ReservedStateUpdateTask( namespace, reservedStateChunk, + versionCheck, handlers, orderedHandlers, ReservedClusterStateService.this::updateErrorState, @@ -233,7 +253,7 @@ public void onResponse(ActionResponse.Empty empty) { @Override public void onFailure(Exception e) { // Don't spam the logs on repeated errors - if (isNewError(existingMetadata, reservedStateVersion.version())) { + if (isNewError(existingMetadata, reservedStateVersion.version(), versionCheck)) { logger.debug("Failed to apply reserved cluster state", e); errorListener.accept(e); } else { @@ -247,7 +267,12 @@ public void onFailure(Exception e) { } // package private for testing - Exception checkAndReportError(String namespace, List errors, ReservedStateVersion reservedStateVersion) { + Exception checkAndReportError( + String namespace, + List errors, + ReservedStateVersion reservedStateVersion, + ReservedStateVersionCheck versionCheck + ) { // Any errors should be discovered through validation performed in the transform calls if (errors.isEmpty() == false) { logger.debug("Error processing state change request for [{}] with the following errors [{}]", namespace, errors); @@ -255,6 +280,7 @@ Exception checkAndReportError(String namespace, List errors, ReservedSta var errorState = new ErrorState( namespace, reservedStateVersion.version(), + versionCheck, errors, ReservedStateErrorMetadata.ErrorKind.VALIDATION ); diff --git 
a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java index 9296981e64d2d..e9fb736608d53 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java @@ -51,10 +51,10 @@ ActionListener listener() { } // package private for testing - static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateVersion) { + static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateVersion, ReservedStateVersionCheck versionCheck) { return (existingMetadata == null || existingMetadata.errorMetadata() == null - || existingMetadata.errorMetadata().version() < newStateVersion + || versionCheck.test(existingMetadata.errorMetadata().version(), newStateVersion) || newStateVersion.equals(RESTORED_VERSION) || newStateVersion.equals(EMPTY_VERSION) || newStateVersion.equals(NO_VERSION)); @@ -63,7 +63,7 @@ static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateV static boolean checkErrorVersion(ClusterState currentState, ErrorState errorState) { ReservedStateMetadata existingMetadata = currentState.metadata().reservedStateMetadata().get(errorState.namespace()); // check for noop here - if (isNewError(existingMetadata, errorState.version()) == false) { + if (isNewError(existingMetadata, errorState.version(), errorState.versionCheck()) == false) { logger.info( () -> format( "Not updating error state because version [%s] is less or equal to the last state error version [%s]", diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 17d4de65506ff..92e248f160f0f 100644 --- 
a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -47,6 +47,7 @@ public class ReservedStateUpdateTask implements ClusterStateTaskListener { private final String namespace; private final ReservedStateChunk stateChunk; + private final ReservedStateVersionCheck versionCheck; private final Map> handlers; private final Collection orderedHandlers; private final Consumer errorReporter; @@ -55,6 +56,7 @@ public class ReservedStateUpdateTask implements ClusterStateTaskListener { public ReservedStateUpdateTask( String namespace, ReservedStateChunk stateChunk, + ReservedStateVersionCheck versionCheck, Map> handlers, Collection orderedHandlers, Consumer errorReporter, @@ -62,6 +64,7 @@ public ReservedStateUpdateTask( ) { this.namespace = namespace; this.stateChunk = stateChunk; + this.versionCheck = versionCheck; this.handlers = handlers; this.orderedHandlers = orderedHandlers; this.errorReporter = errorReporter; @@ -89,7 +92,7 @@ protected ClusterState execute(final ClusterState currentState) { Map reservedState = stateChunk.state(); ReservedStateVersion reservedStateVersion = stateChunk.metadata(); - if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion) == false) { + if (checkMetadataVersion(namespace, existingMetadata, reservedStateVersion, versionCheck) == false) { return currentState; } @@ -110,7 +113,7 @@ protected ClusterState execute(final ClusterState currentState) { } } - checkAndThrowOnError(errors, reservedStateVersion); + checkAndThrowOnError(errors, reservedStateVersion, versionCheck); // Remove the last error if we had previously encountered any in prior processing of reserved state reservedMetadataBuilder.errorMetadata(null); @@ -121,14 +124,15 @@ protected ClusterState execute(final ClusterState currentState) { return stateBuilder.metadata(metadataBuilder).build(); } - private void 
checkAndThrowOnError(List errors, ReservedStateVersion reservedStateVersion) { + private void checkAndThrowOnError(List errors, ReservedStateVersion version, ReservedStateVersionCheck versionCheck) { // Any errors should be discovered through validation performed in the transform calls if (errors.isEmpty() == false) { logger.debug("Error processing state change request for [{}] with the following errors [{}]", namespace, errors); var errorState = new ErrorState( namespace, - reservedStateVersion.version(), + version.version(), + versionCheck, errors, ReservedStateErrorMetadata.ErrorKind.VALIDATION ); @@ -155,7 +159,8 @@ static Set keysForHandler(ReservedStateMetadata reservedStateMetadata, S static boolean checkMetadataVersion( String namespace, ReservedStateMetadata existingMetadata, - ReservedStateVersion reservedStateVersion + ReservedStateVersion reservedStateVersion, + ReservedStateVersionCheck versionCheck ) { if (Version.CURRENT.before(reservedStateVersion.minCompatibleVersion())) { logger.warn( @@ -168,35 +173,45 @@ static boolean checkMetadataVersion( return false; } - if (reservedStateVersion.version().equals(ReservedStateMetadata.EMPTY_VERSION)) { + Long newVersion = reservedStateVersion.version(); + if (newVersion.equals(ReservedStateMetadata.EMPTY_VERSION)) { return true; } // require a regular positive version, reject any special version - if (reservedStateVersion.version() <= 0L) { + if (newVersion <= 0L) { logger.warn( () -> format( "Not updating reserved cluster state for namespace [%s], because version [%s] is less or equal to 0", namespace, - reservedStateVersion.version() + newVersion ) ); return false; } - if (existingMetadata != null && existingMetadata.version() >= reservedStateVersion.version()) { - logger.warn( - () -> format( - "Not updating reserved cluster state for namespace [%s], because version [%s] is less or equal" - + " to the current metadata version [%s]", - namespace, - reservedStateVersion.version(), - 
existingMetadata.version() - ) - ); - return false; + if (existingMetadata == null) { + return true; + } + + Long currentVersion = existingMetadata.version(); + if (versionCheck.test(currentVersion, newVersion)) { + return true; } - return true; + logger.warn( + () -> format( + "Not updating reserved cluster state for namespace [%s], because version [%s] is %s the current metadata version [%s]", + namespace, + newVersion, + switch (versionCheck) { + case ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION -> "less than"; + case ReservedStateVersionCheck.HIGHER_VERSION_ONLY -> "less than or equal to"; + }, + currentVersion + ) + ); + return false; } + } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java new file mode 100644 index 0000000000000..6907331edf1d6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateVersionCheck.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.reservedstate.service; + +import java.util.function.BiPredicate; + +/** + * Enum representing the logic for determining whether a reserved state should be processed + * based on the current and new versions. + */ +public enum ReservedStateVersionCheck implements BiPredicate { + /** + * Returns {@code true} if the new version is higher than the current version. + * This is the default behavior when processing changes to file settings. 
+ */ + HIGHER_VERSION_ONLY { + @Override + public boolean test(Long currentVersion, Long newVersion) { + return currentVersion < newVersion; + } + }, + /** + * Returns {@code true} if the new version is higher or equal to the current version. + * This allows re-processing of the same version. + * Used when processing file settings during service startup. + */ + HIGHER_OR_SAME_VERSION { + @Override + public boolean test(Long currentVersion, Long newVersion) { + return currentVersion <= newVersion; + } + }; +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index aa6a9667ce39e..8ee2754427dda 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -54,6 +54,7 @@ import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.hasEntry; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -148,9 +149,9 @@ public void testOperatorDirName() { @SuppressWarnings("unchecked") public void testInitialFileError() throws Exception { doAnswer((Answer) invocation -> { - ((Consumer) invocation.getArgument(2)).accept(new IllegalStateException("Some exception")); + ((Consumer) invocation.getArgument(3)).accept(new IllegalStateException("Some exception")); return null; - }).when(controller).process(any(), any(XContentParser.class), any()); + }).when(controller).process(any(), any(XContentParser.class), eq(randomFrom(ReservedStateVersionCheck.values())), any()); AtomicBoolean settingsChanged = new AtomicBoolean(false); CountDownLatch latch = new CountDownLatch(1); @@ -163,7 +164,7 @@ public void 
testInitialFileError() throws Exception { } finally { latch.countDown(); } - }).when(fileSettingsService).processFileChanges(); + }).when(fileSettingsService).processFileOnServiceStart(); Files.createDirectories(fileSettingsService.watchedFileDir()); // contents of the JSON don't matter, we just need a file to exist @@ -175,7 +176,8 @@ public void testInitialFileError() throws Exception { // wait until the watcher thread has started, and it has discovered the file assertTrue(latch.await(20, TimeUnit.SECONDS)); - verify(fileSettingsService, times(1)).processFileChanges(); + verify(fileSettingsService, times(1)).processFileOnServiceStart(); + verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); // assert we never notified any listeners of successful application of file based settings assertFalse(settingsChanged.get()); } @@ -184,9 +186,9 @@ public void testInitialFileError() throws Exception { public void testInitialFileWorks() throws Exception { // Let's check that if we didn't throw an error that everything works doAnswer((Answer) invocation -> { - ((Consumer) invocation.getArgument(2)).accept(null); + ((Consumer) invocation.getArgument(3)).accept(null); return null; - }).when(controller).process(any(), any(XContentParser.class), any()); + }).when(controller).process(any(), any(XContentParser.class), any(), any()); CountDownLatch latch = new CountDownLatch(1); @@ -196,13 +198,67 @@ public void testInitialFileWorks() throws Exception { // contents of the JSON don't matter, we just need a file to exist writeTestFile(fileSettingsService.watchedFile(), "{}"); + doAnswer((Answer) invocation -> { + try { + return invocation.callRealMethod(); + } finally { + latch.countDown(); + } + }).when(fileSettingsService).processFileOnServiceStart(); + fileSettingsService.start(); fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); // 
wait for listener to be called assertTrue(latch.await(20, TimeUnit.SECONDS)); + verify(fileSettingsService, times(1)).processFileOnServiceStart(); + verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); + } + + @SuppressWarnings("unchecked") + public void testProcessFileChanges() throws Exception { + doAnswer((Answer) invocation -> { + ((Consumer) invocation.getArgument(3)).accept(null); + return null; + }).when(controller).process(any(), any(XContentParser.class), any(), any()); + + // we get three events: initial clusterChanged event, first write, second write + CountDownLatch latch = new CountDownLatch(3); + + fileSettingsService.addFileChangedListener(latch::countDown); + + Files.createDirectories(fileSettingsService.watchedFileDir()); + // contents of the JSON don't matter, we just need a file to exist + writeTestFile(fileSettingsService.watchedFile(), "{}"); + + doAnswer((Answer) invocation -> { + try { + return invocation.callRealMethod(); + } finally { + latch.countDown(); + } + }).when(fileSettingsService).processFileOnServiceStart(); + doAnswer((Answer) invocation -> { + try { + return invocation.callRealMethod(); + } finally { + latch.countDown(); + } + }).when(fileSettingsService).processFileChanges(); + + fileSettingsService.start(); + fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); + // second file change; contents still don't matter + writeTestFile(fileSettingsService.watchedFile(), "{}"); + + // wait for listener to be called (once for initial processing, once for subsequent update) + assertTrue(latch.await(20, TimeUnit.SECONDS)); + + verify(fileSettingsService, times(1)).processFileOnServiceStart(); + verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); verify(fileSettingsService, times(1)).processFileChanges(); + 
verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_VERSION_ONLY), any()); } @SuppressWarnings("unchecked") diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java index 217b82d7729ae..d96387618e6bd 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java @@ -167,7 +167,12 @@ public void testOperatorController() throws IOException { AtomicReference x = new AtomicReference<>(); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, x::set); + controller.process( + "operator", + parser, + randomFrom(ReservedStateVersionCheck.HIGHER_VERSION_ONLY, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), + x::set + ); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -197,7 +202,12 @@ public void testOperatorController() throws IOException { """; try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, Assert::assertNull); + controller.process( + "operator", + parser, + randomFrom(ReservedStateVersionCheck.HIGHER_VERSION_ONLY, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), + Assert::assertNull + ); } } @@ -236,7 +246,15 @@ public void testUpdateStateTasks() throws Exception { AtomicBoolean successCalled = new AtomicBoolean(false); ReservedStateUpdateTask task = spy( - new ReservedStateUpdateTask("test", null, Map.of(), Set.of(), errorState -> {}, ActionListener.noop()) + new 
ReservedStateUpdateTask( + "test", + null, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + Set.of(), + errorState -> {}, + ActionListener.noop() + ) ); doReturn(state).when(task).execute(any()); @@ -275,7 +293,13 @@ public void testUpdateErrorState() { ReservedClusterStateService service = new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of()); - ErrorState error = new ErrorState("namespace", 2L, List.of("error"), ReservedStateErrorMetadata.ErrorKind.TRANSIENT); + ErrorState error = new ErrorState( + "namespace", + 2L, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + List.of("error"), + ReservedStateErrorMetadata.ErrorKind.TRANSIENT + ); service.updateErrorState(error); assertThat(updateTask.getValue(), notNullValue()); @@ -296,7 +320,13 @@ public void testUpdateErrorState() { // it should not update if the error version is less than the current version when(clusterService.state()).thenReturn(updatedState); - ErrorState oldError = new ErrorState("namespace", 1L, List.of("old error"), ReservedStateErrorMetadata.ErrorKind.TRANSIENT); + ErrorState oldError = new ErrorState( + "namespace", + 1L, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + List.of("old error"), + ReservedStateErrorMetadata.ErrorKind.TRANSIENT + ); service.updateErrorState(oldError); verifyNoMoreInteractions(errorQueue); } @@ -308,7 +338,13 @@ public void testErrorStateTask() throws Exception { ReservedStateErrorTask task = spy( new ReservedStateErrorTask( - new ErrorState("test", 1L, List.of("some parse error", "some io error"), ReservedStateErrorMetadata.ErrorKind.PARSING), + new ErrorState( + "test", + 1L, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + List.of("some parse error", "some io error"), + ReservedStateErrorMetadata.ErrorKind.PARSING + ), ActionListener.running(() -> listenerCompleted.set(true)) ) ); @@ -353,10 +389,12 @@ public TransformState transform(Object source, TransformState prevState) throws Metadata metadata = 
Metadata.builder().put(operatorMetadata).build(); ClusterState state = ClusterState.builder(new ClusterName("test")).metadata(metadata).build(); - assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 2L)); - assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 1L)); - assertTrue(ReservedStateErrorTask.isNewError(operatorMetadata, 3L)); - assertTrue(ReservedStateErrorTask.isNewError(null, 1L)); + assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 2L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, 1L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertTrue(ReservedStateErrorTask.isNewError(operatorMetadata, 2L, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION)); + assertTrue(ReservedStateErrorTask.isNewError(operatorMetadata, 3L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertTrue(ReservedStateErrorTask.isNewError(null, 1L, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); + assertTrue(ReservedStateErrorTask.isNewError(null, 1L, ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION)); var chunk = new ReservedStateChunk(Map.of("one", "two", "maker", "three"), new ReservedStateVersion(2L, Version.CURRENT)); var orderedHandlers = List.of(exceptionThrower.name(), newStateMaker.name()); @@ -367,9 +405,10 @@ public TransformState transform(Object source, TransformState prevState) throws ReservedStateUpdateTask task = new ReservedStateUpdateTask( "namespace_one", chunk, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(exceptionThrower.name(), exceptionThrower, newStateMaker.name(), newStateMaker), orderedHandlers, - errorState -> assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, errorState.version())), + errorState -> assertFalse(ReservedStateErrorTask.isNewError(operatorMetadata, errorState.version(), errorState.versionCheck())), ActionListener.noop() ); @@ -414,9 +453,21 @@ public void testCheckMetadataVersion() { ReservedStateMetadata 
operatorMetadata = ReservedStateMetadata.builder("test").version(123L).build(); ClusterState state = ClusterState.builder(new ClusterName("test")).metadata(Metadata.builder().put(operatorMetadata)).build(); + ReservedStateUpdateTask task = new ReservedStateUpdateTask( "test", new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should be modified", task.execute(state), not(sameInstance(state))); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), e -> {}, @@ -427,16 +478,59 @@ public void testCheckMetadataVersion() { task = new ReservedStateUpdateTask( "test", new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, Map.of(), List.of(), e -> {}, ActionListener.noop() ); assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(123L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should be modified", task.execute(state), not(sameInstance(state))); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, Version.CURRENT)), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(122L, Version.CURRENT)), + 
ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); + + task = new ReservedStateUpdateTask( + "test", + new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.fromId(Version.CURRENT.id + 1))), + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); + assertThat("Cluster state should not be modified", task.execute(state), sameInstance(state)); task = new ReservedStateUpdateTask( "test", new ReservedStateChunk(Map.of(), new ReservedStateVersion(124L, Version.fromId(Version.CURRENT.id + 1))), + ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION, Map.of(), List.of(), e -> {}, @@ -530,11 +624,11 @@ public void testCheckAndReportError() { final var controller = spy(new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of())); - assertNull(controller.checkAndReportError("test", List.of(), null)); + assertNull(controller.checkAndReportError("test", List.of(), null, ReservedStateVersionCheck.HIGHER_VERSION_ONLY)); verify(controller, times(0)).updateErrorState(any()); var version = new ReservedStateVersion(2L, Version.CURRENT); - var error = controller.checkAndReportError("test", List.of("test error"), version); + var error = controller.checkAndReportError("test", List.of("test error"), version, ReservedStateVersionCheck.HIGHER_VERSION_ONLY); assertThat(error, instanceOf(IllegalStateException.class)); assertThat(error.getMessage(), is("Error processing state change request for test, errors: test error")); verify(controller, times(1)).updateErrorState(any()); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java index 9a2ab779669bc..1f453abf32303 100644 --- 
a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java @@ -23,7 +23,15 @@ public class ReservedStateUpdateTaskTests extends ESTestCase { public void testBlockedClusterState() { - var task = new ReservedStateUpdateTask("dummy", null, Map.of(), List.of(), e -> {}, ActionListener.noop()); + var task = new ReservedStateUpdateTask( + "dummy", + null, + ReservedStateVersionCheck.HIGHER_VERSION_ONLY, + Map.of(), + List.of(), + e -> {}, + ActionListener.noop() + ); ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) .build(); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java index 3f3285c5c2bd7..aab89c6620b52 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.reservedstate.service.ReservedStateUpdateTask; import org.elasticsearch.reservedstate.service.ReservedStateUpdateTaskExecutor; import org.elasticsearch.reservedstate.service.ReservedStateVersion; +import org.elasticsearch.reservedstate.service.ReservedStateVersionCheck; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -362,7 +363,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { AtomicReference x = new AtomicReference<>(); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { 
- controller.process("operator", parser, x::set); + controller.process("operator", parser, randomFrom(ReservedStateVersionCheck.values()), x::set); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -383,7 +384,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { ); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, Assert::assertNull); + controller.process("operator", parser, randomFrom(ReservedStateVersionCheck.values()), Assert::assertNull); } } @@ -420,7 +421,7 @@ public void testOperatorControllerWithPluginPackage() { new ReservedStateVersion(123L, Version.CURRENT) ); - controller.process("operator", pack, x::set); + controller.process("operator", pack, randomFrom(ReservedStateVersionCheck.values()), x::set); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -439,6 +440,6 @@ public void testOperatorControllerWithPluginPackage() { ) ); - controller.process("operator", pack, Assert::assertNull); + controller.process("operator", pack, randomFrom(ReservedStateVersionCheck.values()), Assert::assertNull); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 778d88d832887..3b6ffd0698623 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -154,7 +154,17 @@ public void cleanUp() { } public static void 
writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { - long version = versionCounter.incrementAndGet(); + writeJSONFile(node, json, logger, versionCounter, true); + } + + public static void writeJSONFileWithoutVersionIncrement(String node, String json, Logger logger, AtomicLong versionCounter) + throws Exception { + writeJSONFile(node, json, logger, versionCounter, false); + } + + private static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter, boolean incrementVersion) + throws Exception { + long version = incrementVersion ? versionCounter.incrementAndGet() : versionCounter.get(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); assertTrue(fileSettingsService.watching()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java index c0f82adc88784..6c6582138ce89 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java @@ -16,25 +16,33 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; +import org.junit.Before; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import static 
org.elasticsearch.integration.RoleMappingFileSettingsIT.setupClusterStateListener; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.setupClusterStateListenerForCleanup; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFile; +import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFileWithoutVersionIncrement; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyIterable; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class FileSettingsRoleMappingsRestartIT extends SecurityIntegTestCase { - private static AtomicLong versionCounter = new AtomicLong(1); + private final AtomicLong versionCounter = new AtomicLong(1); - private static String testJSONOnlyRoleMappings = """ + @Before + public void resetVersion() { + versionCounter.set(1); + } + + private static final String testJSONOnlyRoleMappings = """ { "metadata": { "version": "%s", @@ -64,7 +72,28 @@ public class FileSettingsRoleMappingsRestartIT extends SecurityIntegTestCase { } }"""; - private static String emptyJSON = """ + private static final String testJSONOnlyUpdatedRoleMappings = """ + { + "metadata": { + "version": "%s", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": { + "everyone_kibana_together": { + "enabled": true, + "roles": [ "kibana_user", "kibana_admin" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo": "something" + } + } + } + } + }"""; + + private static final String emptyJSON = """ { "metadata": { "version": "%s", @@ -88,12 +117,34 @@ public void testReservedStatePersistsOnRestart() throws Exception { boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - var clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - 
assertRoleMappingReservedMetadata(clusterState, "everyone_kibana_alone", "everyone_fleet_alone"); - List roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - assertThat( - roleMappings, - containsInAnyOrder( + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + true + ), + new ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + false + ) + ); + + logger.info("--> restart master"); + internalCluster().restartNode(masterNode); + ensureGreen(); + awaitFileSettingsWatcher(); + + // assert busy to give mappings time to update after restart; otherwise, the role mapping names might be dummy values + // `name_not_available_after_deserialization` + assertBusy( + () -> assertRoleMappingsInClusterState( new ExpressionRoleMapping( "everyone_kibana_alone", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), @@ -113,59 +164,118 @@ public void testReservedStatePersistsOnRestart() throws Exception { ) ); + // now remove the role mappings via the same settings file + cleanupClusterState(masterNode); + + // no role mappings + assertRoleMappingsInClusterState(); + + // and restart the master to confirm the role mappings are all gone + logger.info("--> restart master again"); + internalCluster().restartNode(masterNode); + ensureGreen(); + + // no role mappings + assertRoleMappingsInClusterState(); + } + + public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + + final String masterNode 
= internalCluster().getMasterName(); + + var savedClusterState = setupClusterStateListener(masterNode, "everyone_kibana_alone"); + awaitFileSettingsWatcher(); + logger.info("--> write some role mappings, no other file settings"); + writeJSONFile(masterNode, testJSONOnlyRoleMappings, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + true + ), + new ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + false + ) + ); + + final CountDownLatch latch = new CountDownLatch(1); + final FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + fileSettingsService.addFileChangedListener(latch::countDown); + // Don't increment version but write new file contents to test re-processing on restart + writeJSONFileWithoutVersionIncrement(masterNode, testJSONOnlyUpdatedRoleMappings, logger, versionCounter); + // Make sure we saw a file settings update so that we know it got processed, but it did not affect cluster state + assertTrue(latch.await(20, TimeUnit.SECONDS)); + + // Nothing changed yet because version is the same and there was no restart + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + true + ), + new 
ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + false + ) + ); + logger.info("--> restart master"); internalCluster().restartNode(masterNode); ensureGreen(); + awaitFileSettingsWatcher(); - // assert role mappings are recovered from "disk" - clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - assertRoleMappingReservedMetadata(clusterState, "everyone_kibana_alone", "everyone_fleet_alone"); - roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - assertThat( - roleMappings, - containsInAnyOrder( + // Assert busy to give mappings time to update + assertBusy( + () -> assertRoleMappingsInClusterState( new ExpressionRoleMapping( - "name_not_available_after_deserialization", + "everyone_kibana_together", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), - List.of("kibana_user"), + List.of("kibana_user", "kibana_admin"), List.of(), Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), true - ), - new ExpressionRoleMapping( - "name_not_available_after_deserialization", - new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), - List.of("fleet_user"), - List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), - false ) ) ); + cleanupClusterState(masterNode); + } + + private void assertRoleMappingsInClusterState(ExpressionRoleMapping... 
expectedRoleMappings) { + var clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); + String[] expectedRoleMappingNames = Arrays.stream(expectedRoleMappings).map(ExpressionRoleMapping::getName).toArray(String[]::new); + assertRoleMappingReservedMetadata(clusterState, expectedRoleMappingNames); + var actualRoleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); + assertThat(actualRoleMappings, containsInAnyOrder(expectedRoleMappings)); + } + + private void cleanupClusterState(String masterNode) throws Exception { // now remove the role mappings via the same settings file - savedClusterState = setupClusterStateListenerForCleanup(masterNode); + var savedClusterState = setupClusterStateListenerForCleanup(masterNode); awaitFileSettingsWatcher(); logger.info("--> remove the role mappings with an empty settings file"); writeJSONFile(masterNode, emptyJSON, logger, versionCounter); - awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - - clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - assertRoleMappingReservedMetadata(clusterState); - roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - assertThat(roleMappings, emptyIterable()); - - // and restart the master to confirm the role mappings are all gone - logger.info("--> restart master again"); - internalCluster().restartNode(masterNode); - ensureGreen(); - - // assert empty role mappings are recovered from "disk" - clusterState = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(); - assertRoleMappingReservedMetadata(clusterState); - roleMappings = new ArrayList<>(RoleMappingMetadata.getFromClusterState(clusterState).getRoleMappings()); - 
assertThat(roleMappings, emptyIterable()); } private void assertRoleMappingReservedMetadata(ClusterState clusterState, String... names) { diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java index 0fcc4b8007c6d..b993633e3d17d 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.reservedstate.service.ReservedClusterStateService; import org.elasticsearch.reservedstate.service.ReservedStateUpdateTask; import org.elasticsearch.reservedstate.service.ReservedStateUpdateTaskExecutor; +import org.elasticsearch.reservedstate.service.ReservedStateVersionCheck; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -399,7 +400,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { AtomicReference x = new AtomicReference<>(); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, x::set); + controller.process("operator", parser, randomFrom(ReservedStateVersionCheck.values()), x::set); assertThat(x.get(), instanceOf(IllegalStateException.class)); assertThat(x.get().getMessage(), containsString("Error processing state change request for operator")); @@ -419,7 +420,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { ); try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, testJSON)) { - controller.process("operator", parser, Assert::assertNull); + controller.process("operator", 
parser, randomFrom(ReservedStateVersionCheck.values()), Assert::assertNull); } } From d5a19578772c7f5d9eb12f774d9040fbdfb48e30 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 21 Oct 2024 11:30:08 +0200 Subject: [PATCH 27/67] ES|QL: remove dead code for LIKE operator (#115037) --- .../core/expression/predicate/regex/Like.java | 46 --------- .../predicate/regex/LikePattern.java | 95 ------------------ .../core/planner/ExpressionTranslators.java | 4 - .../predicate/regex/StringPatternTests.java | 98 +++++++++---------- .../rules/logical/ConstantFoldingTests.java | 3 - .../rules/logical/ReplaceRegexMatchTests.java | 36 +------ .../esql/tree/EsqlNodeSubclassTests.java | 8 -- 7 files changed, 54 insertions(+), 236 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java deleted file mode 100644 index 6d8ce8cbdf47f..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.regex; - -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.io.IOException; - -public class Like extends RegexMatch { - - public Like(Source source, Expression left, LikePattern pattern) { - this(source, left, pattern, false); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public String getWriteableName() { - throw new UnsupportedOperationException(); - } - - public Like(Source source, Expression left, LikePattern pattern, boolean caseInsensitive) { - super(source, left, pattern, caseInsensitive); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Like::new, field(), pattern(), caseInsensitive()); - } - - @Override - protected Like replaceChild(Expression newLeft) { - return new Like(source(), newLeft, pattern(), caseInsensitive()); - } - -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java deleted file mode 100644 index 52ce2636e914b..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.regex; - -import org.apache.lucene.index.Term; -import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; -import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.xpack.esql.core.util.StringUtils; - -import java.util.Objects; - -/** - * A SQL 'like' pattern. - * Similar to basic regex, supporting '_' instead of '?' and '%' instead of '*'. - *

- * Allows escaping based on a regular char. - * - * To prevent conflicts with ES, the string and char must be validated to not contain '*'. - */ -public class LikePattern extends AbstractStringPattern { - - private final String pattern; - private final char escape; - - private final String regex; - private final String wildcard; - private final String indexNameWildcard; - - public LikePattern(String pattern, char escape) { - this.pattern = pattern; - this.escape = escape; - // early initialization to force string validation - this.regex = StringUtils.likeToJavaPattern(pattern, escape); - this.wildcard = StringUtils.likeToLuceneWildcard(pattern, escape); - this.indexNameWildcard = StringUtils.likeToIndexWildcard(pattern, escape); - } - - public String pattern() { - return pattern; - } - - public char escape() { - return escape; - } - - @Override - public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - } - - @Override - public String asJavaRegex() { - return regex; - } - - /** - * Returns the pattern in (Lucene) wildcard format. - */ - public String asLuceneWildcard() { - return wildcard; - } - - /** - * Returns the pattern in (IndexNameExpressionResolver) wildcard format. 
- */ - public String asIndexNameWildcard() { - return indexNameWildcard; - } - - @Override - public int hashCode() { - return Objects.hash(pattern, escape); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - LikePattern other = (LikePattern) obj; - return Objects.equals(pattern, other.pattern) && escape == other.escape; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index 176250222512b..366630eadb5fe 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; @@ -66,9 +65,6 @@ public static Query doTranslate(RegexMatch e, TranslatorHandler handler) { } private static Query translateField(RegexMatch e, String targetFieldName) { - if (e instanceof Like l) { - return new WildcardQuery(e.source(), targetFieldName, l.pattern().asLuceneWildcard(), l.caseInsensitive()); - } if (e instanceof WildcardLike l) { return new WildcardQuery(e.source(), targetFieldName, l.pattern().asLuceneWildcard(), l.caseInsensitive()); } diff --git 
a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java index 43cae475cff7e..c361b7e3726ed 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/StringPatternTests.java @@ -12,78 +12,78 @@ public class StringPatternTests extends ESTestCase { - private LikePattern like(String pattern, char escape) { - return new LikePattern(pattern, escape); + private WildcardPattern like(String pattern) { + return new WildcardPattern(pattern); } private RLikePattern rlike(String pattern) { return new RLikePattern(pattern); } - private boolean matchesAll(String pattern, char escape) { - return like(pattern, escape).matchesAll(); + private boolean likeMatchesAll(String pattern) { + return like(pattern).matchesAll(); } - private boolean exactMatch(String pattern, char escape) { - String escaped = pattern.replace(Character.toString(escape), StringUtils.EMPTY); - return escaped.equals(like(pattern, escape).exactMatch()); + private boolean likeExactMatch(String pattern) { + String escaped = pattern.replace("\\", StringUtils.EMPTY); + return escaped.equals(like(pattern).exactMatch()); } - private boolean matchesAll(String pattern) { + private boolean rlikeMatchesAll(String pattern) { return rlike(pattern).matchesAll(); } - private boolean exactMatch(String pattern) { + private boolean rlikeExactMatch(String pattern) { return pattern.equals(rlike(pattern).exactMatch()); } - public void testWildcardMatchAll() throws Exception { - assertTrue(matchesAll("%", '0')); - assertTrue(matchesAll("%%", '0')); + public void testWildcardMatchAll() { + assertTrue(likeMatchesAll("*")); + assertTrue(likeMatchesAll("**")); - 
assertFalse(matchesAll("a%", '0')); - assertFalse(matchesAll("%_", '0')); - assertFalse(matchesAll("%_%_%", '0')); - assertFalse(matchesAll("_%", '0')); - assertFalse(matchesAll("0%", '0')); + assertFalse(likeMatchesAll("a*")); + assertFalse(likeMatchesAll("*?")); + assertFalse(likeMatchesAll("*?*?*")); + assertFalse(likeMatchesAll("?*")); + assertFalse(likeMatchesAll("\\*")); } - public void testRegexMatchAll() throws Exception { - assertTrue(matchesAll(".*")); - assertTrue(matchesAll(".*.*")); - assertTrue(matchesAll(".*.?")); - assertTrue(matchesAll(".?.*")); - assertTrue(matchesAll(".*.?.*")); + public void testRegexMatchAll() { + assertTrue(rlikeMatchesAll(".*")); + assertTrue(rlikeMatchesAll(".*.*")); + assertTrue(rlikeMatchesAll(".*.?")); + assertTrue(rlikeMatchesAll(".?.*")); + assertTrue(rlikeMatchesAll(".*.?.*")); - assertFalse(matchesAll("..*")); - assertFalse(matchesAll("ab.")); - assertFalse(matchesAll("..?")); + assertFalse(rlikeMatchesAll("..*")); + assertFalse(rlikeMatchesAll("ab.")); + assertFalse(rlikeMatchesAll("..?")); } - public void testWildcardExactMatch() throws Exception { - assertTrue(exactMatch("0%", '0')); - assertTrue(exactMatch("0_", '0')); - assertTrue(exactMatch("123", '0')); - assertTrue(exactMatch("1230_", '0')); - assertTrue(exactMatch("1230_321", '0')); - - assertFalse(exactMatch("%", '0')); - assertFalse(exactMatch("%%", '0')); - assertFalse(exactMatch("a%", '0')); - assertFalse(exactMatch("a_", '0')); + public void testWildcardExactMatch() { + assertTrue(likeExactMatch("\\*")); + assertTrue(likeExactMatch("\\?")); + assertTrue(likeExactMatch("123")); + assertTrue(likeExactMatch("123\\?")); + assertTrue(likeExactMatch("123\\?321")); + + assertFalse(likeExactMatch("*")); + assertFalse(likeExactMatch("**")); + assertFalse(likeExactMatch("a*")); + assertFalse(likeExactMatch("a?")); } - public void testRegexExactMatch() throws Exception { - assertFalse(exactMatch(".*")); - assertFalse(exactMatch(".*.*")); - 
assertFalse(exactMatch(".*.?")); - assertFalse(exactMatch(".?.*")); - assertFalse(exactMatch(".*.?.*")); - assertFalse(exactMatch("..*")); - assertFalse(exactMatch("ab.")); - assertFalse(exactMatch("..?")); - - assertTrue(exactMatch("abc")); - assertTrue(exactMatch("12345")); + public void testRegexExactMatch() { + assertFalse(rlikeExactMatch(".*")); + assertFalse(rlikeExactMatch(".*.*")); + assertFalse(rlikeExactMatch(".*.?")); + assertFalse(rlikeExactMatch(".?.*")); + assertFalse(rlikeExactMatch(".*.?.*")); + assertFalse(rlikeExactMatch("..*")); + assertFalse(rlikeExactMatch("ab.")); + assertFalse(rlikeExactMatch("..?")); + + assertTrue(rlikeExactMatch("abc")); + assertTrue(rlikeExactMatch("12345")); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java index a74ceb4e1426c..c2e85cc43284a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java @@ -17,8 +17,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; @@ -101,7 +99,6 @@ public void testConstantNot() { } public void testConstantFoldingLikes() { - assertEquals(TRUE, new 
ConstantFolding().rule(new Like(EMPTY, of("test_emp"), new LikePattern("test%", (char) 0))).canonical()); assertEquals(TRUE, new ConstantFolding().rule(new WildcardLike(EMPTY, of("test_emp"), new WildcardPattern("test*"))).canonical()); assertEquals(TRUE, new ConstantFolding().rule(new RLike(EMPTY, of("test_emp"), new RLikePattern("test.emp"))).canonical()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java index c5e64d41be4dc..20d638a113bf2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java @@ -11,8 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; @@ -26,18 +24,6 @@ public class ReplaceRegexMatchTests extends ESTestCase { - public void testMatchAllLikeToExist() { - for (String s : asList("%", "%%", "%%%")) { - LikePattern pattern = new LikePattern(s, (char) 0); - FieldAttribute fa = getFieldAttribute(); - Like l = new Like(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(IsNotNull.class, e.getClass()); - IsNotNull inn = (IsNotNull) e; - assertEquals(fa, inn.field()); - } - } - public void 
testMatchAllWildcardLikeToExist() { for (String s : asList("*", "**", "***")) { WildcardPattern pattern = new WildcardPattern(s); @@ -60,31 +46,19 @@ public void testMatchAllRLikeToExist() { assertEquals(fa, inn.field()); } - public void testExactMatchLike() { - for (String s : asList("ab", "ab0%", "ab0_c")) { - LikePattern pattern = new LikePattern(s, '0'); + public void testExactMatchWildcardLike() { + for (String s : asList("ab", "ab\\*", "ab\\?c")) { + WildcardPattern pattern = new WildcardPattern(s); FieldAttribute fa = getFieldAttribute(); - Like l = new Like(EMPTY, fa, pattern); + WildcardLike l = new WildcardLike(EMPTY, fa, pattern); Expression e = new ReplaceRegexMatch().rule(l); assertEquals(Equals.class, e.getClass()); Equals eq = (Equals) e; assertEquals(fa, eq.left()); - assertEquals(s.replace("0", StringUtils.EMPTY), eq.right().fold()); + assertEquals(s.replace("\\", StringUtils.EMPTY), eq.right().fold()); } } - public void testExactMatchWildcardLike() { - String s = "ab"; - WildcardPattern pattern = new WildcardPattern(s); - FieldAttribute fa = getFieldAttribute(); - WildcardLike l = new WildcardLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(Equals.class, e.getClass()); - Equals eq = (Equals) e; - assertEquals(fa, eq.left()); - assertEquals(s, eq.right().fold()); - } - public void testExactMatchRLike() { RLikePattern pattern = new RLikePattern("abc"); FieldAttribute fa = getFieldAttribute(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 7075c9fe58d63..2bee0188b9fab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -28,8 +28,6 @@ import 
org.elasticsearch.xpack.esql.core.expression.UnresolvedNamedExpression; import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.esql.core.tree.Node; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -422,12 +420,6 @@ public void accept(Page page) { } return b.toString(); } - } else if (toBuildClass == Like.class) { - - if (argClass == LikePattern.class) { - return new LikePattern(randomAlphaOfLength(16), randomFrom('\\', '|', '/', '`')); - } - } else if (argClass == Dissect.Parser.class) { // Dissect.Parser is a record / final, cannot be mocked String pattern = randomDissectPattern(); From af18f1027b0b0b4616668feccb30eeaf86e56cda Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 21 Oct 2024 12:34:29 +0200 Subject: [PATCH 28/67] Fix scale up for model allocations (#115189) --- .../ml/autoscaling/MlAutoscalingContext.java | 2 +- .../MlAutoscalingDeciderServiceTests.java | 48 +++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java index f266dda6e3e5d..dfe52897caf2c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java @@ -177,7 +177,7 @@ public boolean isEmpty() { return anomalyDetectionTasks.isEmpty() && snapshotUpgradeTasks.isEmpty() && dataframeAnalyticsTasks.isEmpty() - && 
modelAssignments.values().stream().allMatch(assignment -> assignment.totalTargetAllocations() == 0); + && modelAssignments.values().stream().allMatch(assignment -> assignment.getTaskParams().getNumberOfAllocations() == 0); } public List findPartiallyAllocatedModels() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java index a1db31c474f31..cf78e5f900e15 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java @@ -54,6 +54,7 @@ import static org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator.STATIC_JVM_UPPER_THRESHOLD; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; @@ -331,6 +332,53 @@ public void testScale_GivenModelWithZeroAllocations() { assertThat(result.requiredCapacity().node().memory().getBytes(), equalTo(0L)); } + public void testScale_GivenTrainedModelAllocationAndNoMlNode() { + MlAutoscalingDeciderService service = buildService(); + service.onMaster(); + + ClusterState clusterState = new ClusterState.Builder(new ClusterName("cluster")).metadata( + Metadata.builder() + .putCustom( + TrainedModelAssignmentMetadata.NAME, + new TrainedModelAssignmentMetadata( + Map.of( + "model", + TrainedModelAssignment.Builder.empty( + new StartTrainedModelDeploymentAction.TaskParams( + "model", + "model-deployment", + 400, + 1, + 2, + 100, + null, + Priority.NORMAL, + 0L, + 0L + ), + new AdaptiveAllocationsSettings(true, 0, 4) + ).setAssignmentState(AssignmentState.STARTING).build() + ) 
+ ) + ) + .build() + ).build(); + + AutoscalingDeciderResult result = service.scale( + Settings.EMPTY, + new DeciderContext( + clusterState, + new AutoscalingCapacity(AutoscalingCapacity.AutoscalingResources.ZERO, AutoscalingCapacity.AutoscalingResources.ZERO) + ) + ); + + assertThat(result.reason().summary(), containsString("requesting scale up")); + assertThat(result.requiredCapacity().total().memory().getBytes(), greaterThan(TEST_JOB_SIZE)); + assertThat(result.requiredCapacity().total().processors().count(), equalTo(2.0)); + assertThat(result.requiredCapacity().node().memory().getBytes(), greaterThan(TEST_JOB_SIZE)); + assertThat(result.requiredCapacity().node().processors().count(), equalTo(2.0)); + } + private DiscoveryNode buildNode(String id, ByteSizeValue machineMemory, int allocatedProcessors) { return DiscoveryNodeUtils.create( id, From 8c23fd77122cea2e235718034ddfcb4a2e945d92 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 21 Oct 2024 21:38:46 +1100 Subject: [PATCH 29/67] [Test] Flush response body for progress (#115177) In JDK23, response headers are no longer always immediately sent. See also https://bugs.openjdk.org/browse/JDK-8331847 This PR adds flush call for the response body to make progress. 
Resolves: #115145 Resolves: #115164 --- .../repositories/s3/S3BlobContainerRetriesTests.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 2eb2ed26153f9..b292dc5872994 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -586,16 +586,16 @@ public void handle(HttpExchange exchange) throws IOException { ), -1 ); + exchange.getResponseBody().flush(); } else if (randomBoolean()) { final var bytesSent = sendIncompleteContent(exchange, bytes); if (bytesSent < meaningfulProgressBytes) { failuresWithoutProgress += 1; - } else { - exchange.getResponseBody().flush(); } } else { failuresWithoutProgress += 1; } + exchange.getResponseBody().flush(); exchange.close(); } } @@ -640,6 +640,7 @@ public void handle(HttpExchange exchange) throws IOException { failureCount += 1; Streams.readFully(exchange.getRequestBody()); sendIncompleteContent(exchange, bytes); + exchange.getResponseBody().flush(); exchange.close(); } } From 3fad5f485880e1b3f88dc135f0dbeccbd517c1e4 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Mon, 21 Oct 2024 12:47:18 +0200 Subject: [PATCH 30/67] Enable tests for out of range comparisons for float/half_float fields (#113122) * Enable tests for out of range comparisons for float/half_float fields * Address feedback comments * Implement suggestions --------- Co-authored-by: Elastic Machine --- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 5 +- .../LocalPhysicalPlanOptimizerTests.java | 71 ++++++++++++++++--- 2 files changed, 62 insertions(+), 14 deletions(-) diff --git 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index e3199649a91be..2a50988e9e35e 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -398,9 +398,8 @@ public void testOutOfRangeComparisons() throws IOException { "long", // TODO: https://github.com/elastic/elasticsearch/issues/102935 // "unsigned_long", - // TODO: https://github.com/elastic/elasticsearch/issues/100130 - // "half_float", - // "float", + "half_float", + "float", "double", "scaled_float" ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 72060bccb520a..3436502610d62 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; @@ -147,6 +148,10 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes ); } + private Analyzer makeAnalyzer(String mappingFileName) { + return makeAnalyzer(mappingFileName, new EnrichResolution()); + } + /** * Expects * LimitExec[1000[INTEGER]] @@ -449,7 +454,7 @@ 
public void testQueryStringFunctionWithFunctionsPushedToLucene() { from test | where qstr("last_name: Smith") and cidr_match(ip, "127.0.0.1/32") """; - var analyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var analyzer = makeAnalyzer("mapping-all-types.json"); var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); var limit = as(plan, LimitExec.class); @@ -610,7 +615,7 @@ public void testMatchFunctionWithFunctionsPushedToLucene() { from test | where match(text, "beta") and cidr_match(ip, "127.0.0.1/32") """; - var analyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var analyzer = makeAnalyzer("mapping-all-types.json"); var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); var limit = as(plan, LimitExec.class); @@ -892,8 +897,15 @@ public void testIsNotNull_TextField_Pushdown_WithCount() { private record OutOfRangeTestCase(String fieldName, String tooLow, String tooHigh) {}; + private static final String LT = "<"; + private static final String LTE = "<="; + private static final String GT = ">"; + private static final String GTE = ">="; + private static final String EQ = "=="; + private static final String NEQ = "!="; + public void testOutOfRangeFilterPushdown() { - var allTypeMappingAnalyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + var allTypeMappingAnalyzer = makeAnalyzer("mapping-all-types.json"); String largerThanInteger = String.valueOf(randomLongBetween(Integer.MAX_VALUE + 1L, Long.MAX_VALUE)); String smallerThanInteger = String.valueOf(randomLongBetween(Long.MIN_VALUE, Integer.MIN_VALUE - 1L)); @@ -910,16 +922,8 @@ public void testOutOfRangeFilterPushdown() { new OutOfRangeTestCase("integer", smallerThanInteger, largerThanInteger), new OutOfRangeTestCase("long", smallerThanLong, largerThanLong) // TODO: add unsigned_long https://github.com/elastic/elasticsearch/issues/102935 - // TODO: add half_float, float 
https://github.com/elastic/elasticsearch/issues/100130 ); - final String LT = "<"; - final String LTE = "<="; - final String GT = ">"; - final String GTE = ">="; - final String EQ = "=="; - final String NEQ = "!="; - for (OutOfRangeTestCase testCase : cases) { List trueForSingleValuesPredicates = List.of( LT + testCase.tooHigh, @@ -972,6 +976,51 @@ public void testOutOfRangeFilterPushdown() { } } + public void testOutOfRangeFilterPushdownWithFloatAndHalfFloat() { + var allTypeMappingAnalyzer = makeAnalyzer("mapping-all-types.json"); + + String smallerThanFloat = String.valueOf(randomDoubleBetween(-Double.MAX_VALUE, -Float.MAX_VALUE - 1d, true)); + String largerThanFloat = String.valueOf(randomDoubleBetween(Float.MAX_VALUE + 1d, Double.MAX_VALUE, true)); + + List cases = List.of( + new OutOfRangeTestCase("float", smallerThanFloat, largerThanFloat), + new OutOfRangeTestCase("half_float", smallerThanFloat, largerThanFloat) + ); + + for (OutOfRangeTestCase testCase : cases) { + for (var value : List.of(testCase.tooHigh, testCase.tooLow)) { + for (String predicate : List.of(LT, LTE, GT, GTE, EQ, NEQ)) { + String comparison = testCase.fieldName + predicate + value; + var query = "from test | where " + comparison; + + Source expectedSource = new Source(1, 18, comparison); + + logger.info("Query: " + query); + EsQueryExec actualQueryExec = doTestOutOfRangeFilterPushdown(query, allTypeMappingAnalyzer); + + assertThat(actualQueryExec.query(), is(instanceOf(SingleValueQuery.Builder.class))); + var actualLuceneQuery = (SingleValueQuery.Builder) actualQueryExec.query(); + assertThat(actualLuceneQuery.field(), equalTo(testCase.fieldName)); + assertThat(actualLuceneQuery.source(), equalTo(expectedSource)); + + QueryBuilder actualInnerLuceneQuery = actualLuceneQuery.next(); + + if (predicate.equals(EQ)) { + QueryBuilder expectedInnerQuery = QueryBuilders.termQuery(testCase.fieldName, Double.parseDouble(value)); + assertThat(actualInnerLuceneQuery, equalTo(expectedInnerQuery)); + } 
else if (predicate.equals(NEQ)) { + QueryBuilder expectedInnerQuery = QueryBuilders.boolQuery() + .mustNot(QueryBuilders.termQuery(testCase.fieldName, Double.parseDouble(value))); + assertThat(actualInnerLuceneQuery, equalTo(expectedInnerQuery)); + } else { // one of LT, LTE, GT, GTE + assertTrue(actualInnerLuceneQuery instanceof RangeQueryBuilder); + assertThat(((RangeQueryBuilder) actualInnerLuceneQuery).fieldName(), equalTo(testCase.fieldName)); + } + } + } + } + } + /** * Expects e.g. * LimitExec[1000[INTEGER]] From 1cae3c83615fcd7f716b4f00dc4ac8aad2215906 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 21 Oct 2024 12:51:10 +0200 Subject: [PATCH 31/67] [DOCS] Documents that dynamic templates are not supported by semantic_text. (#115195) --- docs/reference/mapping/types/semantic-text.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index 07abbff986643..ac23c153e01a3 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -221,4 +221,5 @@ Notice that both the `semantic_text` field and the source field are updated in t `semantic_text` field types have the following limitations: * `semantic_text` fields are not currently supported as elements of <>. +* `semantic_text` fields can't currently be set as part of <>. * `semantic_text` fields can't be defined as <> of another field, nor can they contain other fields as multi-fields. 
From f2567525011ae14f3b15b8a4d4b0161e60530432 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 21 Oct 2024 12:56:56 +0200 Subject: [PATCH 32/67] [DOCS] Removes experimental tag from Inference API pages (#113857) --- docs/reference/inference/delete-inference.asciidoc | 2 -- docs/reference/inference/get-inference.asciidoc | 2 -- docs/reference/inference/inference-apis.asciidoc | 2 -- docs/reference/inference/post-inference.asciidoc | 2 -- docs/reference/inference/put-inference.asciidoc | 2 -- docs/reference/inference/update-inference.asciidoc | 2 -- 6 files changed, 12 deletions(-) diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 4fc4beaca6d8e..a83fb1a516b80 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -2,8 +2,6 @@ [[delete-inference-api]] === Delete {infer} API -experimental[] - Deletes an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index d991729fe77c9..16e38d2aa148b 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -2,8 +2,6 @@ [[get-inference-api]] === Get {infer} API -experimental[] - Retrieves {infer} endpoint information. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. 
diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index e756831075027..b291b464be498 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -2,8 +2,6 @@ [[inference-apis]] == {infer-cap} APIs -experimental[] - IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio or Hugging Face. For built-in models and models uploaded diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index ce51abaff07f8..4edefcc911e2e 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -2,8 +2,6 @@ [[post-inference-api]] === Perform inference API -experimental[] - Performs an inference task on an input text by using an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 6d6b61ffea771..e7e25ec98b49d 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -2,8 +2,6 @@ [[put-inference-api]] === Create {infer} API -experimental[] - Creates an {infer} endpoint to perform an {infer} task. 
[IMPORTANT] diff --git a/docs/reference/inference/update-inference.asciidoc b/docs/reference/inference/update-inference.asciidoc index 01a99d7f53062..efd29231ac12e 100644 --- a/docs/reference/inference/update-inference.asciidoc +++ b/docs/reference/inference/update-inference.asciidoc @@ -2,8 +2,6 @@ [[update-inference-api]] === Update inference API -experimental[] - Updates an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI, Anthropic, Watsonx.ai, or Hugging Face. From 671458a999c53c7c8b9df05ed2a2269a7a4a3d68 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Mon, 21 Oct 2024 13:01:58 +0200 Subject: [PATCH 33/67] Always flush response body in AbstractBlobContainerRetriesTestCase#sendIncompleteContent with JDK23 (#115197) Resolves https://github.com/elastic/elasticsearch/issues/115172 --- .../blobstore/AbstractBlobContainerRetriesTestCase.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index 90c621c62c305..12094b31a049d 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -418,7 +418,9 @@ protected int sendIncompleteContent(HttpExchange exchange, byte[] bytes) throws if (bytesToSend > 0) { exchange.getResponseBody().write(bytes, rangeStart, bytesToSend); } - if (randomBoolean()) { + if (randomBoolean() || Runtime.version().feature() >= 23) { + // For now in JDK23 we need to always flush. See https://bugs.openjdk.org/browse/JDK-8331847. 
+ // TODO: remove the JDK version check once that issue is fixed exchange.getResponseBody().flush(); } return bytesToSend; From 8efd08b019b9160f5e703520c6ce3b6a9f92cfbd Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 21 Oct 2024 13:38:23 +0200 Subject: [PATCH 34/67] Upgrade to Lucene 10 (#114741) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The most relevant ES changes that upgrading to Lucene 10 requires are: - use the appropriate IOContext - Scorer / ScorerSupplier breaking changes - Regex automaton are no longer determinized by default - minimize moved to test classes - introduce Elasticsearch900Codec - adjust slicing code according to the added support for intra-segment concurrency - disable intra-segment concurrency in tests - adjust accessor methods for many Lucene classes that became a record - adapt to breaking changes in the analysis area Co-authored-by: Christoph Büscher Co-authored-by: Mayya Sharipova Co-authored-by: ChrisHegarty Co-authored-by: Brian Seeders Co-authored-by: Armin Braun Co-authored-by: Panagiotis Bailis Co-authored-by: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> --- .../pipelines/lucene-snapshot/run-tests.yml | 1 - .../vector/VectorScorerBenchmark.java | 10 +- .../forbidden/es-server-signatures.txt | 4 - build-tools-internal/version.properties | 2 +- distribution/src/config/jvm.options | 3 + docs/Versions.asciidoc | 4 +- docs/changelog/113482.yaml | 27 + docs/changelog/113614.yaml | 18 + docs/changelog/114124.yaml | 18 + docs/changelog/114146.yaml | 20 + docs/changelog/114741.yaml | 5 + docs/plugins/analysis-nori.asciidoc | 12 +- .../analysis/analyzers/lang-analyzer.asciidoc | 3 +- .../tokenfilters/stemmer-tokenfilter.asciidoc | 1 - .../pathhierarchy-tokenizer.asciidoc | 24 +- docs/reference/search/profile.asciidoc | 2 +- gradle/verification-metadata.xml | 150 +- .../simdvec/VectorScorerFactory.java | 10 +- .../simdvec/VectorScorerFactoryImpl.java | 6 +- 
.../simdvec/VectorScorerFactoryImpl.java | 6 +- .../simdvec/internal/Int7SQVectorScorer.java | 8 +- .../internal/Int7SQVectorScorerSupplier.java | 24 +- .../simdvec/internal/Int7SQVectorScorer.java | 21 +- .../simdvec/VectorScorerFactoryTests.java | 7 +- modules/analysis-common/build.gradle | 4 + .../common/LegacyRomanianStemmer.java | 741 +++++++++ .../common/PersianAnalyzerProvider.java | 56 +- .../common/RomanianAnalyzerProvider.java | 46 +- .../common/StemmerTokenFilterFactory.java | 68 +- .../common/HighlighterWithAnalyzersTests.java | 2 +- .../common/PersianAnalyzerProviderTests.java | 78 + .../common/RomanianAnalyzerTests.java | 80 + .../StemmerTokenFilterFactoryTests.java | 38 + .../test/analysis-common/20_analyzers.yml | 29 +- .../apm/internal/tracing/APMTracer.java | 5 +- .../datastreams/DataStreamIT.java | 4 +- .../datastreams/TSDBIndexingIT.java | 2 +- .../ingest/geoip/GeoIpDownloaderIT.java | 4 +- .../script/expression/MoreExpressionIT.java | 64 +- .../ExpressionDoubleValuesScript.java | 14 +- .../expression/ExpressionScriptEngine.java | 48 +- .../mustache/SearchTemplateResponseTests.java | 2 +- .../ScriptedMetricAggContextsTests.java | 5 - .../painless/SimilarityScriptTests.java | 4 +- .../RankFeaturesMapperIntegrationIT.java | 8 +- .../TokenCountFieldMapperIntegrationIT.java | 2 +- .../extras/SearchAsYouTypeFieldMapper.java | 4 +- .../extras/SourceConfirmedTextQuery.java | 32 +- .../extras/MatchOnlyTextFieldMapperTests.java | 4 +- .../extras/SourceConfirmedTextQueryTests.java | 2 +- .../extras/SourceIntervalsSourceTests.java | 2 +- .../join/aggregations/ChildrenIT.java | 8 +- .../join/query/ChildQuerySearchIT.java | 126 +- .../elasticsearch/join/query/InnerHitsIT.java | 40 +- .../aggregations/ParentJoinAggregator.java | 9 +- .../ParentChildInnerHitContextBuilder.java | 8 +- .../ChildrenToParentAggregatorTests.java | 2 +- .../ParentToChildrenAggregatorTests.java | 2 +- .../join/query/HasChildQueryBuilderTests.java | 13 +- 
.../percolator/PercolateQuery.java | 140 +- .../percolator/PercolatorFieldMapper.java | 2 +- .../PercolatorMatchedSlotSubFetchPhase.java | 2 +- .../percolator/QueryAnalyzer.java | 16 +- .../percolator/CandidateQueryTests.java | 64 +- .../percolator/PercolateQueryTests.java | 8 +- .../PercolatorFieldMapperTests.java | 19 +- .../PercolatorQuerySearchTests.java | 2 +- .../percolator/QueryAnalyzerTests.java | 2 +- .../index/reindex/CrossClusterReindexIT.java | 10 +- .../reindex/ReindexValidator.java | 3 +- .../reindex/remote/RemoteResponseParsers.java | 4 +- .../AnnotatedTextFieldMapperTests.java | 2 +- .../AnnotatedTextHighlighterTests.java | 2 +- .../store/smb/SmbMmapFsDirectoryFactory.java | 1 - .../upgrades/FullClusterRestartIT.java | 206 +++ rest-api-spec/build.gradle | 8 +- .../rest-api-spec/test/search/370_profile.yml | 44 +- .../action/IndicesRequestIT.java | 4 +- .../admin/indices/create/CreateIndexIT.java | 4 +- .../admin/indices/create/SplitIndexIT.java | 2 +- .../action/bulk/BulkProcessor2RetryIT.java | 6 +- .../action/bulk/BulkProcessorRetryIT.java | 6 +- .../action/bulk/IncrementalBulkIT.java | 6 +- .../bulk/TransportSimulateBulkActionIT.java | 4 +- .../action/bulk/WriteAckDelayIT.java | 4 +- .../action/search/PointInTimeIT.java | 8 +- .../action/search/TransportSearchIT.java | 14 +- .../elasticsearch/aliases/IndexAliasesIT.java | 30 +- .../broadcast/BroadcastActionsIT.java | 2 +- .../document/DocumentActionsIT.java | 4 +- .../elasticsearch/index/FinalPipelineIT.java | 8 +- .../index/engine/MaxDocsLimitIT.java | 4 +- .../mapper/CopyToMapperIntegrationIT.java | 2 +- .../index/store/ExceptionRetryIT.java | 2 +- .../elasticsearch/indexing/IndexActionIT.java | 8 +- .../indices/IndicesRequestCacheIT.java | 46 +- .../state/CloseWhileRelocatingShardsIT.java | 2 +- .../recovery/RecoveryWhileUnderLoadIT.java | 6 +- .../elasticsearch/recovery/RelocationIT.java | 8 +- .../elasticsearch/routing/AliasRoutingIT.java | 2 +- .../routing/PartitionedRoutingIT.java | 6 +- 
.../elasticsearch/search/SearchTimeoutIT.java | 4 +- .../search/aggregations/CombiIT.java | 2 +- .../search/aggregations/EquivalenceIT.java | 4 +- .../aggregations/FiltersAggsRewriteIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 6 +- .../bucket/DateHistogramOffsetIT.java | 6 +- .../aggregations/bucket/DateRangeIT.java | 18 +- .../search/aggregations/bucket/FilterIT.java | 2 +- .../search/aggregations/bucket/FiltersIT.java | 4 +- .../aggregations/bucket/GeoDistanceIT.java | 2 +- .../aggregations/bucket/HistogramIT.java | 2 +- .../search/aggregations/bucket/NestedIT.java | 2 +- .../search/aggregations/bucket/RangeIT.java | 2 +- .../aggregations/metrics/ExtendedStatsIT.java | 4 +- .../metrics/HDRPercentileRanksIT.java | 4 +- .../metrics/HDRPercentilesIT.java | 4 +- .../metrics/ScriptedMetricIT.java | 28 +- .../search/aggregations/metrics/StatsIT.java | 2 +- .../search/aggregations/metrics/SumIT.java | 2 +- .../metrics/TDigestPercentileRanksIT.java | 4 +- .../metrics/TDigestPercentilesIT.java | 4 +- .../aggregations/metrics/TopHitsIT.java | 40 +- .../aggregations/metrics/ValueCountIT.java | 2 +- .../basic/SearchWhileCreatingIndexIT.java | 6 +- .../search/basic/SearchWhileRelocatingIT.java | 6 +- .../basic/TransportTwoNodesSearchIT.java | 14 +- .../search/ccs/CrossClusterSearchIT.java | 2 +- .../search/fetch/FetchSubPhasePluginIT.java | 2 +- .../search/fetch/subphase/InnerHitsIT.java | 50 +- .../highlight/HighlighterSearchIT.java | 6 +- .../search/fields/SearchFieldsIT.java | 36 +- .../functionscore/DecayFunctionScoreIT.java | 30 +- .../functionscore/ExplainableScriptIT.java | 2 +- .../search/functionscore/FunctionScoreIT.java | 18 +- .../search/functionscore/QueryRescorerIT.java | 10 +- .../functionscore/RandomScoreFunctionIT.java | 6 +- .../search/nested/SimpleNestedIT.java | 2 +- .../search/profile/query/QueryProfilerIT.java | 4 +- .../elasticsearch/search/query/ExistsIT.java | 2 +- .../search/query/MultiMatchQueryIT.java | 10 +- 
.../search/query/QueryStringIT.java | 2 +- .../search/query/SearchQueryIT.java | 14 +- .../search/query/SimpleQueryStringIT.java | 2 +- .../retriever/MinimalCompoundRetrieverIT.java | 2 +- .../search/retriever/RetrieverRewriteIT.java | 8 +- .../search/routing/SearchPreferenceIT.java | 6 +- .../routing/SearchReplicaSelectionIT.java | 6 +- .../scriptfilter/ScriptQuerySearchIT.java | 8 +- .../search/scroll/DuelScrollIT.java | 10 +- .../search/scroll/SearchScrollIT.java | 34 +- .../search/searchafter/SearchAfterIT.java | 2 +- .../search/simple/SimpleSearchIT.java | 2 +- .../search/slice/SearchSliceIT.java | 10 +- .../search/sort/FieldSortIT.java | 60 +- .../search/sort/SimpleSortIT.java | 8 +- .../search/source/MetadataFetchingIT.java | 4 +- .../similarity/SimilarityIT.java | 4 +- server/src/main/java/module-info.java | 3 +- .../diskusage/IndexDiskUsageAnalyzer.java | 21 +- .../search/BottomSortValuesCollector.java | 2 +- .../CountOnlyQueryPhaseResultConsumer.java | 4 +- .../action/search/SearchPhaseController.java | 4 +- .../bootstrap/BootstrapChecks.java | 2 +- .../elasticsearch/common/lucene/Lucene.java | 6 +- .../lucene/index/FilterableTermsEnum.java | 6 + .../lucene/search/AutomatonQueries.java | 7 +- .../search/CaseInsensitivePrefixQuery.java | 8 +- .../search/CaseInsensitiveWildcardQuery.java | 4 +- .../common/lucene/search/Queries.java | 2 +- .../SpanBooleanQueryRewriteWithMaxClause.java | 13 +- .../common/lucene/search/XMoreLikeThis.java | 5 +- .../search/function/FunctionScoreQuery.java | 89 +- .../search/function/MinScoreScorer.java | 8 +- .../search/function/ScriptScoreFunction.java | 10 +- .../search/function/ScriptScoreQuery.java | 82 +- .../org/elasticsearch/common/regex/Regex.java | 4 +- .../common/settings/KeyStoreWrapper.java | 2 +- .../xcontent/support/XContentMapValues.java | 8 +- .../gateway/PersistedClusterStateService.java | 6 +- .../org/elasticsearch/index/IndexModule.java | 3 +- .../elasticsearch/index/IndexVersions.java | 55 +- 
.../index/codec/CodecService.java | 6 +- .../codec/DeduplicatingFieldInfosFormat.java | 3 +- .../index/codec/Elasticsearch816Codec.java | 2 +- .../index/codec/Elasticsearch900Codec.java | 131 ++ .../codec/LegacyPerFieldMapperCodec.java | 6 +- .../index/codec/PerFieldMapperCodec.java | 2 +- .../ES85BloomFilterPostingsFormat.java | 8 +- .../ES87BloomFilterPostingsFormat.java | 8 +- .../codec/tsdb/ES87TSDBDocValuesConsumer.java | 210 ++- .../codec/tsdb/ES87TSDBDocValuesFormat.java | 46 +- .../codec/tsdb/ES87TSDBDocValuesProducer.java | 139 +- .../vectors/BinarizedByteVectorValues.java | 61 +- .../codec/vectors/ES813FlatVectorFormat.java | 5 - .../vectors/ES813Int8FlatVectorFormat.java | 6 - .../ES814ScalarQuantizedVectorsFormat.java | 15 +- .../vectors/ES815BitFlatVectorsFormat.java | 48 +- .../vectors/ES816BinaryFlatVectorsScorer.java | 22 +- .../ES816BinaryQuantizedVectorsReader.java | 38 +- .../ES816BinaryQuantizedVectorsWriter.java | 143 +- .../vectors/OffHeapBinarizedVectorValues.java | 100 +- ...RandomAccessBinarizedByteVectorValues.java | 84 - .../index/engine/LuceneChangesSnapshot.java | 4 +- .../RecoverySourcePruneMergePolicy.java | 6 + .../index/engine/TranslogDirectoryReader.java | 21 +- .../ordinals/GlobalOrdinalMapping.java | 7 +- .../fielddata/ordinals/MultiOrdinals.java | 15 +- .../index/mapper/DateFieldMapper.java | 5 +- .../index/mapper/DocumentLeafReader.java | 23 +- .../index/mapper/IdFieldMapper.java | 5 +- .../index/mapper/IpPrefixAutomatonUtil.java | 5 +- .../index/mapper/KeywordFieldMapper.java | 4 +- .../index/mapper/LegacyTypeFieldMapper.java | 3 +- .../index/mapper/NumberFieldMapper.java | 9 +- .../index/mapper/StringFieldType.java | 8 +- .../index/mapper/TermBasedFieldType.java | 3 +- .../index/mapper/TextFieldMapper.java | 4 +- .../flattened/FlattenedFieldMapper.java | 8 +- .../KeyedFlattenedLeafFieldData.java | 14 +- .../DenormalizedCosineFloatVectorValues.java | 29 +- .../vectors/DenseVectorFieldMapper.java | 14 +- 
.../query/CombinedFieldsQueryBuilder.java | 4 +- .../index/query/IntervalBuilder.java | 6 +- .../index/query/NestedQueryBuilder.java | 8 +- .../elasticsearch/index/query/RegexpFlag.java | 10 +- .../index/query/RegexpQueryBuilder.java | 4 +- .../index/query/ScriptQueryBuilder.java | 6 +- .../index/query/TermsSetQueryBuilder.java | 5 +- .../reindex/ClientScrollableHitSource.java | 2 +- .../index/search/MatchQueryParser.java | 9 +- .../index/search/MultiMatchQueryParser.java | 2 +- .../index/search/NestedHelper.java | 41 +- .../index/search/QueryStringQueryParser.java | 9 +- .../RemoveCorruptedLuceneSegmentsAction.java | 4 +- .../index/shard/ShardSplittingQuery.java | 168 +- .../index/store/FsDirectoryFactory.java | 71 +- .../org/elasticsearch/index/store/Store.java | 18 +- .../index/store/StoreFileMetadata.java | 2 +- .../index/termvectors/TermVectorsService.java | 4 +- .../indices/AssociatedIndexDescriptor.java | 2 +- .../indices/IndicesQueryCache.java | 14 - .../indices/SystemIndexDescriptor.java | 10 +- .../elasticsearch/indices/SystemIndices.java | 17 +- .../recovery/RecoverySourceHandler.java | 2 +- .../grouping/GroupingDocValuesSelector.java | 4 +- .../lucene/grouping/TopFieldGroups.java | 4 +- .../queries/BinaryDocValuesRangeQuery.java | 7 +- .../lucene/queries/BlendedTermQuery.java | 13 +- .../lucene/queries/MinDocQuery.java | 7 +- .../queries/SearchAfterSortedDocQuery.java | 8 +- .../lucene/spatial/ShapeDocValuesQuery.java | 20 +- .../elasticsearch/node/NodeConstruction.java | 4 +- .../blobstore/BlobStoreRepository.java | 2 +- .../rest/action/cat/RestCountAction.java | 4 +- .../rest/action/search/RestCountAction.java | 2 +- .../org/elasticsearch/script/ScoreScript.java | 5 + .../SortedSetDocValuesStringFieldScript.java | 5 +- .../script/field/IpDocValuesField.java | 3 +- .../ByteKnnDenseVectorDocValuesField.java | 13 +- .../vectors/KnnDenseVectorDocValuesField.java | 13 +- .../elasticsearch/search/MultiValueMode.java | 8 +- 
.../elasticsearch/search/SearchFeatures.java | 5 +- .../org/elasticsearch/search/SearchHits.java | 6 +- .../aggregations/MultiBucketCollector.java | 30 +- .../bucket/composite/CompositeAggregator.java | 2 +- .../composite/GlobalOrdinalValuesSource.java | 11 +- .../countedterms/CountedTermsAggregator.java | 4 +- .../bucket/filter/MergedPointRangeQuery.java | 24 - .../bucket/filter/QueryToFilterAdapter.java | 5 +- .../bucket/global/GlobalAggregator.java | 4 +- .../bucket/nested/NestedAggregator.java | 8 - .../bucket/range/BinaryRangeAggregator.java | 3 +- .../sampler/BestDocsDeferringCollector.java | 12 +- .../DiversifiedOrdinalsSamplerAggregator.java | 4 +- .../sampler/random/RandomSamplingQuery.java | 7 +- .../GlobalOrdinalsStringTermsAggregator.java | 16 +- .../bucket/terms/IncludeExclude.java | 6 +- .../metrics/CardinalityAggregator.java | 3 +- .../GlobalOrdCardinalityAggregator.java | 7 +- .../aggregations/metrics/InternalTopHits.java | 10 +- .../metrics/MetricInspectionHelper.java | 2 +- .../metrics/TopHitsAggregator.java | 8 +- .../aggregations/support/MissingValues.java | 26 +- .../support/TimeSeriesIndexSearcher.java | 2 +- .../fetch/subphase/UnmappedFieldFetcher.java | 8 +- .../highlight/FragmentBuilderHelper.java | 6 +- .../search/internal/ContextIndexSearcher.java | 42 +- .../internal/ExitableDirectoryReader.java | 146 +- .../FieldUsageTrackingDirectoryReader.java | 14 - .../search/profile/query/ProfileScorer.java | 11 +- .../search/profile/query/ProfileWeight.java | 35 +- .../search/query/QueryPhase.java | 2 +- .../search/query/QueryPhaseCollector.java | 9 +- .../retriever/rankdoc/RankDocsQuery.java | 16 +- .../runtime/AbstractScriptFieldQuery.java | 6 +- ...oPointScriptFieldDistanceFeatureQuery.java | 9 +- .../LongScriptFieldDistanceFeatureQuery.java | 10 +- .../runtime/StringScriptFieldRegexpQuery.java | 8 +- .../StringScriptFieldWildcardQuery.java | 3 +- .../search/slice/DocIdSliceQuery.java | 6 +- .../search/slice/DocValuesSliceQuery.java | 6 +- 
.../search/slice/TermsSliceQuery.java | 6 +- .../search/sort/ScoreSortBuilder.java | 1 - .../completion/CompletionSuggester.java | 3 +- .../phrase/DirectCandidateGenerator.java | 4 +- .../search/suggest/phrase/LaplaceScorer.java | 2 +- .../phrase/LinearInterpolatingScorer.java | 2 +- .../phrase/NoisyChannelSpellChecker.java | 2 +- .../suggest/phrase/StupidBackoffScorer.java | 4 +- .../search/vectors/DenseVectorQuery.java | 12 +- ...iversifyingChildrenByteKnnVectorQuery.java | 2 +- ...versifyingChildrenFloatKnnVectorQuery.java | 2 +- .../search/vectors/ESKnnByteVectorQuery.java | 2 +- .../search/vectors/ESKnnFloatVectorQuery.java | 2 +- .../search/vectors/KnnScoreDocQuery.java | 6 +- .../search/vectors/VectorSimilarityQuery.java | 19 +- .../SynonymsManagementAPIService.java | 6 +- .../services/org.apache.lucene.codecs.Codec | 1 + .../IndexDiskUsageAnalyzerTests.java | 26 +- ...ountOnlyQueryPhaseResultConsumerTests.java | 8 +- .../action/search/DfsQueryPhaseTests.java | 6 +- .../action/search/FetchSearchPhaseTests.java | 10 +- .../search/SearchPhaseControllerTests.java | 28 +- .../SearchQueryThenFetchAsyncActionTests.java | 8 +- .../search/SearchResponseMergerTests.java | 28 +- .../action/search/SearchResponseTests.java | 4 +- .../AbstractTermVectorsTestCase.java | 2 +- .../MetadataCreateIndexServiceTests.java | 4 + .../common/lucene/LuceneTests.java | 33 +- .../lucene/index/FreqTermsEnumTests.java | 4 +- .../search/function/MinScoreScorerTests.java | 31 +- .../morelikethis/XMoreLikeThisTests.java | 6 +- .../deps/lucene/SimpleLuceneTests.java | 6 +- .../deps/lucene/VectorHighlighterTests.java | 8 +- .../gateway/MetadataStateFormatTests.java | 2 +- .../PersistedClusterStateServiceTests.java | 4 +- .../index/IndexServiceTests.java | 6 +- .../elasticsearch/index/codec/CodecTests.java | 2 +- .../tsdb/ES87TSDBDocValuesFormatTests.java | 2 - ...ValuesFormatVariableSkipIntervalTests.java | 196 +++ .../BaseKnnBitVectorsFormatTestCase.java | 6 +- 
.../vectors/ES813FlatVectorFormatTests.java | 4 +- .../ES813Int8FlatVectorFormatTests.java | 4 +- ...HnswScalarQuantizedVectorsFormatTests.java | 21 +- .../ES815BitFlatVectorFormatTests.java | 4 +- .../ES815HnswBitVectorsFormatTests.java | 4 +- .../ES816BinaryFlatVectorsScorerTests.java | 43 +- ...S816BinaryQuantizedVectorsFormatTests.java | 28 +- ...HnswBinaryQuantizedVectorsFormatTests.java | 12 +- .../codec/zstd/StoredFieldCodecDuelTests.java | 6 +- ...estCompressionStoredFieldsFormatTests.java | 4 +- ...td814BestSpeedStoredFieldsFormatTests.java | 4 +- .../engine/CompletionStatsCacheTests.java | 4 +- .../index/engine/InternalEngineTests.java | 69 +- .../index/engine/LiveVersionMapTests.java | 27 +- .../RecoverySourcePruneMergePolicyTests.java | 10 +- .../index/engine/SegmentTests.java | 2 +- .../AbstractFieldDataImplTestCase.java | 12 +- .../AbstractStringFieldDataTestCase.java | 22 +- .../ordinals/MultiOrdinalsTests.java | 2 - .../FieldStatsProviderRefreshTests.java | 6 +- .../mapper/BooleanScriptFieldTypeTests.java | 12 +- .../index/mapper/DoubleIndexingDocTests.java | 14 +- .../mapper/DoubleScriptFieldTypeTests.java | 17 +- .../mapper/FieldNamesFieldMapperTests.java | 1 - .../index/mapper/IdFieldTypeTests.java | 4 +- .../mapper/IpPrefixAutomatonUtilTests.java | 9 +- .../index/mapper/IpScriptFieldTypeTests.java | 8 +- .../index/mapper/KeywordFieldTypeTests.java | 2 +- .../mapper/KeywordScriptFieldTypeTests.java | 17 +- .../mapper/LongScriptFieldTypeTests.java | 17 +- .../mapper/StoredNumericValuesTests.java | 2 +- .../index/mapper/TextFieldMapperTests.java | 2 +- .../KeyedFlattenedFieldTypeTests.java | 2 +- .../KeyedFlattenedLeafFieldDataTests.java | 9 +- ...ormalizedCosineFloatVectorValuesTests.java | 8 +- .../KnnDenseVectorScriptDocValuesTests.java | 104 +- .../index/query/BoolQueryBuilderTests.java | 10 +- .../CombinedFieldsQueryParsingTests.java | 8 +- .../DistanceFeatureQueryBuilderTests.java | 4 +- .../index/query/ExistsQueryBuilderTests.java | 4 +- 
.../MatchBoolPrefixQueryBuilderTests.java | 13 +- .../index/query/MatchQueryBuilderTests.java | 11 +- .../query/MoreLikeThisQueryBuilderTests.java | 4 +- .../query/QueryStringQueryBuilderTests.java | 12 +- .../query/SimpleQueryStringBuilderTests.java | 2 +- .../query/SpanMultiTermQueryBuilderTests.java | 9 +- .../query/TermsSetQueryBuilderTests.java | 8 +- .../IndexLevelReplicationTests.java | 2 +- .../AbstractNumberNestedSortingTestCase.java | 14 +- .../nested/DoubleNestedSortingTests.java | 2 +- .../nested/FloatNestedSortingTests.java | 2 +- .../search/nested/NestedSortingTests.java | 104 +- .../index/shard/IndexReaderWrapperTests.java | 8 +- .../index/shard/IndexShardTests.java | 8 +- .../index/shard/RefreshListenersTests.java | 5 +- .../index/shard/ShardSplittingQueryTests.java | 4 +- .../similarity/ScriptedSimilarityTests.java | 4 +- .../index/store/FsDirectoryFactoryTests.java | 60 +- .../indices/IndicesQueryCacheTests.java | 25 +- .../indices/IndicesRequestCacheTests.java | 4 +- ...PassGroupingCollectorSearchAfterTests.java | 6 +- .../SinglePassGroupingCollectorTests.java | 12 +- .../lucene/queries/BlendedTermQueryTests.java | 28 +- .../CustomUnifiedHighlighterTests.java | 2 +- .../script/ScriptTermStatsTests.java | 4 +- .../search/MultiValueModeTests.java | 13 +- .../search/SearchCancellationTests.java | 16 +- .../search/SearchServiceTests.java | 34 +- .../MultiBucketCollectorTests.java | 84 +- .../bucket/ShardSizeTestCase.java | 4 +- .../composite/CompositeAggregatorTests.java | 10 +- .../CompositeValuesCollectorQueueTests.java | 148 +- .../SingleDimensionValuesSourceTests.java | 3 +- .../range/BinaryRangeAggregatorTests.java | 4 +- .../terms/RareTermsAggregatorTests.java | 2 +- .../bucket/terms/TermsAggregatorTests.java | 4 +- .../metrics/InternalTopHitsTests.java | 8 +- .../metrics/TopHitsAggregatorTests.java | 14 +- .../support/IncludeExcludeTests.java | 9 +- .../support/MissingValuesTests.java | 9 +- .../support/TimeSeriesIndexSearcherTests.java | 5 +- 
.../internal/ContextIndexSearcherTests.java | 33 +- .../query/ProfileCollectorManagerTests.java | 4 +- .../profile/query/ProfileScorerTests.java | 30 +- .../profile/query/QueryProfilerTests.java | 5 - .../query/QueryPhaseCollectorTests.java | 76 +- .../search/query/QueryPhaseTests.java | 154 +- .../search/query/QueryPhaseTimeoutTests.java | 86 +- .../rankdoc/RankDocsQueryBuilderTests.java | 8 +- .../sort/BucketedSortForFloatsTests.java | 6 - .../CategoryContextMappingTests.java | 5 +- .../phrase/DirectCandidateGeneratorTests.java | 8 +- .../AbstractDenseVectorQueryTestCase.java | 2 +- .../vectors/KnnScoreDocQueryBuilderTests.java | 4 +- .../vectors/VectorSimilarityQueryTests.java | 8 +- .../snapshots/SnapshotResiliencyTests.java | 9 +- ...ncySimulatingBlobStoreRepositoryTests.java | 2 +- .../index/engine/EngineTestCase.java | 9 +- .../AbstractScriptFieldTypeTestCase.java | 2 +- .../index/mapper/FieldTypeTestCase.java | 2 + .../analysis/AnalysisFactoryTestCase.java | 4 +- .../search/SearchResponseUtils.java | 2 +- .../aggregations/AggregatorTestCase.java | 31 +- .../metrics/AbstractGeoTestCase.java | 2 +- .../metrics/CentroidAggregationTestBase.java | 2 +- .../SpatialBoundsAggregationTestBase.java | 2 +- .../geo/BasePointShapeQueryTestCase.java | 18 +- .../search/geo/BaseShapeQueryTestCase.java | 8 +- .../geo/DatelinePointShapeQueryTestCase.java | 6 +- .../geo/GeoBoundingBoxQueryIntegTestCase.java | 12 +- .../search/geo/GeoShapeQueryTestCase.java | 2 +- .../elasticsearch/test/CorruptionUtils.java | 2 +- .../elasticsearch/test/ESIntegTestCase.java | 6 +- .../org/elasticsearch/test/ESTestCase.java | 42 + .../engine/ThrowingLeafReaderWrapper.java | 7 - .../hamcrest/ElasticsearchAssertions.java | 12 +- .../rate/TimeSeriesRateAggregatorTests.java | 16 +- .../StringStatsAggregatorTests.java | 2 +- .../search/AsyncSearchIntegTestCase.java | 4 +- .../search/AsyncSearchSingleNodeTests.java | 4 +- .../common/BlobCacheBufferedIndexInput.java | 3 +- 
.../repository/CcrRestoreSourceService.java | 2 +- .../elasticsearch/xpack/CcrIntegTestCase.java | 2 +- .../sourceonly/SourceOnlySnapshotIT.java | 2 +- .../frozen/RewriteCachingDirectoryReader.java | 18 +- .../sourceonly/SourceOnlySnapshot.java | 1 + .../AbstractTransportGetResourcesAction.java | 2 +- .../ml/dataframe/evaluation/Evaluation.java | 2 +- .../xpack/core/security/ScrollHelper.java | 6 +- .../profile/SuggestProfilesResponse.java | 4 +- .../accesscontrol/FieldSubsetReader.java | 16 - .../permission/ApplicationPermission.java | 4 +- .../authz/permission/ClusterPermission.java | 3 +- .../authz/permission/FieldPermissions.java | 13 +- .../authz/permission/IndicesPermission.java | 4 +- .../security/authz/privilege/Privilege.java | 3 +- .../core/security/support/Automatons.java | 84 +- .../termsenum/action/SimpleTermCountEnum.java | 6 + .../SourceOnlySnapshotShardTests.java | 12 +- .../sourceonly/SourceOnlySnapshotTests.java | 13 +- .../WeightedTokensQueryBuilderTests.java | 6 +- .../DocumentSubsetReaderTests.java | 8 +- .../accesscontrol/FieldSubsetReaderTests.java | 60 +- ...ityIndexReaderWrapperIntegrationTests.java | 2 +- .../authz/privilege/IndexPrivilegeTests.java | 16 +- .../authz/privilege/PrivilegeTests.java | 14 +- .../security/support/AutomatonsTests.java | 5 +- .../xpack/enrich/EnrichPolicyRunnerTests.java | 70 +- .../EnrichShardMultiSearchActionTests.java | 2 +- .../connector/ConnectorIndexService.java | 4 +- .../syncjob/ConnectorSyncJobIndexService.java | 2 +- .../rules/QueryRulesIndexService.java | 2 +- .../search/SearchApplicationIndexService.java | 2 +- .../xpack/eql/action/EqlSearchResponse.java | 4 +- .../test/resources/querytranslator_tests.txt | 6 +- .../predicate/regex/RLikePattern.java | 6 +- .../predicate/regex/WildcardPattern.java | 4 +- .../compute/lucene/LuceneSliceQueue.java | 4 +- .../lucene/LuceneTopNSourceOperator.java | 4 +- .../lucene/PartialLeafReaderContext.java | 5 + .../enrich/EnrichQuerySourceOperator.java | 3 +- 
.../function/scalar/string/AutomataMatch.java | 2 +- .../xpack/esql/parser/ExpressionBuilder.java | 2 +- .../querydsl/query/SingleValueMatchQuery.java | 58 +- .../EnrichQuerySourceOperatorTests.java | 2 +- .../querydsl/query/SingleValueQueryTests.java | 7 +- .../xpack/graph/test/GraphTests.java | 4 +- .../action/TransportGraphExploreAction.java | 6 +- .../ShardBulkInferenceActionFilterIT.java | 2 +- .../mapper/SemanticTextFieldMapperTests.java | 8 +- .../queries/SemanticQueryBuilderTests.java | 10 +- .../action/TransportGetPipelineAction.java | 8 +- .../mapper/ConstantKeywordFieldMapper.java | 6 +- .../CountedKeywordFieldMapper.java | 7 +- .../CountedKeywordFieldTypeTests.java | 6 +- .../unsignedlong/UnsignedLongFieldMapper.java | 2 +- .../xpack/versionfield/VersionEncoder.java | 5 +- .../VersionFieldWildcardQuery.java | 6 +- .../VersionStringDocValuesField.java | 3 +- .../VersionStringFieldMapper.java | 3 +- .../versionfield/VersionStringFieldTests.java | 38 +- .../ml/integration/DeleteExpiredDataIT.java | 2 +- .../MlNativeAutodetectIntegTestCase.java | 2 +- .../xpack/ml/integration/PersistJobIT.java | 6 +- .../xpack/ml/integration/RegressionIT.java | 2 +- .../ml/integration/RevertModelSnapshotIT.java | 2 +- .../integration/RunDataFrameAnalyticsIT.java | 4 +- .../BucketCorrelationAggregationIT.java | 2 +- .../xpack/ml/integration/DatafeedCcsIT.java | 2 +- .../integration/MlDistributedFailureIT.java | 2 +- .../TransportGetOverallBucketsAction.java | 2 +- .../TransportPutTrainedModelAction.java | 4 +- ...ransportStartDataFrameAnalyticsAction.java | 2 +- .../extractor/DataExtractorUtils.java | 2 +- .../persistence/DatafeedConfigProvider.java | 4 +- .../extractor/DataFrameDataExtractor.java | 4 +- .../dataframe/inference/InferenceRunner.java | 2 +- .../ml/dataframe/steps/InferenceStep.java | 2 +- .../TrainTestSplitterFactory.java | 2 +- .../ChunkedTrainedModelRestorer.java | 2 +- .../persistence/TrainedModelProvider.java | 2 +- 
.../ml/job/persistence/JobConfigProvider.java | 2 +- .../ml/job/persistence/JobDataDeleter.java | 2 +- .../job/persistence/JobResultsProvider.java | 18 +- .../retention/ExpiredForecastsRemover.java | 2 +- .../persistence/BatchedDocumentsIterator.java | 2 +- .../SearchAfterDocumentsIterator.java | 2 +- .../SparseVectorQueryBuilderTests.java | 4 +- .../TextExpansionQueryBuilderTests.java | 4 +- .../monitoring/integration/MonitoringIT.java | 4 +- .../local/LocalExporterIntegTests.java | 4 +- .../LocalExporterResourceIntegTests.java | 6 +- .../lucene/bwc/AbstractArchiveTestCase.java | 2 +- .../xpack/lucene/bwc/OldSegmentInfos.java | 4 +- .../xpack/lucene/bwc/codecs/BWCCodec.java | 7 +- .../index/LegacyDocValuesIterables.java | 17 +- .../LegacySortedSetDocValuesWrapper.java | 6 +- .../lucene/bwc/codecs/lucene50/ForUtil.java | 12 +- .../lucene50/Lucene50FieldInfosFormat.java | 2 + .../lucene54/Lucene54DocValuesProducer.java | 6 + .../lucene60/MetadataOnlyBKDReader.java | 22 +- .../bwc/codecs/lucene70/BWCLucene70Codec.java | 8 +- .../bwc/codecs/lucene70/IndexedDISI.java | 327 ++++ .../bwc/codecs/lucene70/Lucene70Codec.java | 15 + .../lucene70/Lucene70DocValuesConsumer.java | 681 ++++++++ .../lucene70/Lucene70DocValuesFormat.java | 171 ++ .../lucene70/Lucene70DocValuesProducer.java | 1461 +++++++++++++++++ .../services/org.apache.lucene.codecs.Codec | 1 + .../org.apache.lucene.codecs.DocValuesFormat | 1 + .../bwc/codecs/OldCodecsAvailableTests.java | 2 +- .../lucene50/BlockPostingsFormat3Tests.java | 8 +- .../Lucene54DocValuesFormatTests.java | 4 +- .../Lucene70DocValuesFormatTests.java | 26 + .../action/TransportGetStackTracesAction.java | 4 +- .../action/TransportGetStatusAction.java | 2 +- .../predicate/regex/LikePattern.java | 4 +- .../predicate/regex/RLikePattern.java | 6 +- .../predicate/regex/WildcardPattern.java | 4 +- .../xpack/rank/rrf/RRFRankMultiShardIT.java | 6 +- .../xpack/rank/rrf/RRFRankSingleShardIT.java | 6 +- .../xpack/rank/rrf/RRFRetrieverBuilderIT.java 
| 32 +- .../rrf/RRFRetrieverBuilderNestedDocsIT.java | 4 +- .../PinnedQueryBuilderIT.java | 4 +- .../searchbusinessrules/CappedScoreQuery.java | 30 +- .../CappedScoreWeight.java | 19 +- .../searchbusinessrules/CappedScorer.java | 5 +- ...pshotsCanMatchOnCoordinatorIntegTests.java | 8 +- ...napshotsRecoverFromSnapshotIntegTests.java | 2 +- ...archableSnapshotsRepositoryIntegTests.java | 8 +- ...tsBlobStoreCacheMaintenanceIntegTests.java | 2 +- .../BlobStoreCacheMaintenanceService.java | 2 +- .../cache/full/PersistentCache.java | 7 +- .../input/CachedBlobContainerIndexInput.java | 3 +- .../AbstractSearchableSnapshotsTestCase.java | 4 +- .../InMemoryNoOpCommitDirectoryTests.java | 4 +- .../SearchableSnapshotDirectoryTests.java | 10 +- ...tRemoteClusterSecurityDlsAndFlsRestIT.java | 2 +- .../RemoteClusterSecurityCcrIT.java | 2 +- .../RemoteClusterSecurityCcrMigrationIT.java | 2 +- .../RemoteClusterSecurityMutualTlsIT.java | 2 +- .../RemoteClusterSecurityRestIT.java | 2 +- .../RemoteClusterSecuritySpecialUserIT.java | 2 +- .../RemoteClusterSecurityTopologyRestIT.java | 4 +- .../DateMathExpressionIntegTests.java | 2 +- .../integration/DlsFlsRequestCacheTests.java | 2 +- .../DocumentLevelSecurityRandomTests.java | 2 +- .../DocumentLevelSecurityTests.java | 22 +- .../FieldLevelSecurityRandomTests.java | 6 +- .../integration/FieldLevelSecurityTests.java | 48 +- .../integration/KibanaUserRoleIntegTests.java | 8 +- .../MultipleIndicesPermissionsTests.java | 2 +- .../authc/esnative/NativeRealmIntegTests.java | 8 +- .../security/authz/ReadActionsTests.java | 12 +- .../security/authz/SecurityScrollTests.java | 4 +- .../security/profile/ProfileIntegTests.java | 2 +- .../xpack/security/authc/ApiKeyService.java | 2 +- .../authc/esnative/NativeUsersStore.java | 6 +- .../xpack/security/authz/RBACEngine.java | 3 +- .../DeprecationRoleDescriptorConsumer.java | 4 +- .../authz/store/NativeRolesStore.java | 12 +- .../security/profile/ProfileService.java | 6 +- 
.../security/support/SecurityMigrations.java | 4 +- .../authz/store/FileRolesStoreTests.java | 16 +- .../slm/SLMSnapshotBlockingIntegTests.java | 2 +- .../SnapshotBasedIndexRecoveryIT.java | 4 +- .../GeoGridAggAndQueryConsistencyIT.java | 4 +- .../search/GeoShapeWithDocValuesIT.java | 2 +- .../search/ShapeQueryOverShapeTests.java | 2 +- .../spatial/search/ShapeQueryTestCase.java | 14 +- .../spatial/ingest/CircleProcessorTests.java | 8 +- .../xpack/sql/execution/search/Querier.java | 4 +- .../search/extractor/TopHitsAggExtractor.java | 2 +- .../TransformUsageTransportAction.java | 2 +- .../TimeBasedCheckpointProvider.java | 2 +- .../IndexBasedTransformConfigManager.java | 4 +- .../common/AbstractCompositeAggFunction.java | 2 +- .../CompositeBucketsChangeCollector.java | 4 +- .../watcher/WatcherConcreteIndexTests.java | 2 +- .../actions/TimeThrottleIntegrationTests.java | 2 +- .../history/HistoryActionConditionTests.java | 6 +- .../HistoryTemplateEmailMappingsTests.java | 2 +- .../HistoryTemplateHttpMappingsTests.java | 2 +- ...storyTemplateIndexActionMappingsTests.java | 2 +- ...storyTemplateSearchInputMappingsTests.java | 2 +- .../AbstractWatcherIntegrationTestCase.java | 10 +- .../test/integration/BootStrapTests.java | 6 +- .../integration/RejectedExecutionTests.java | 2 +- .../test/integration/SingleNodeTests.java | 2 +- .../test/integration/WatchMetadataTests.java | 2 +- .../transform/TransformIntegrationTests.java | 12 +- .../xpack/watcher/WatcherService.java | 2 +- .../xpack/watcher/common/http/HttpClient.java | 3 +- .../execution/TriggeredWatchStore.java | 2 +- .../input/search/ExecutableSearchInput.java | 2 +- .../actions/TransportQueryWatchesAction.java | 4 +- .../BinaryDvConfirmedAutomatonQuery.java | 65 +- .../wildcard/mapper/WildcardFieldMapper.java | 12 +- .../mapper/WildcardFieldMapperTests.java | 35 +- .../oldrepos/OldRepositoryAccessIT.java | 8 +- .../TokenBackwardsCompatibilityIT.java | 2 +- 662 files changed, 8792 insertions(+), 3627 deletions(-) 
create mode 100644 docs/changelog/113482.yaml create mode 100644 docs/changelog/113614.yaml create mode 100644 docs/changelog/114124.yaml create mode 100644 docs/changelog/114146.yaml create mode 100644 docs/changelog/114741.yaml create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java delete mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java create mode 100644 x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index c76c54a56494e..f7293e051467c 100644 --- 
a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -56,7 +56,6 @@ steps: matrix: setup: BWC_VERSION: - - 7.17.13 - 8.9.1 - 8.10.0 agents: diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java index 569e8909e1e12..b294fe97c7e7c 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -19,7 +19,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.core.IOUtils; @@ -217,19 +217,17 @@ public float squareDistanceScalar() { return 1 / (1f + adjustedDistance); } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new 
Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } - RandomVectorScorer luceneScorer(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) - throws IOException { + RandomVectorScorer luceneScorer(QuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorer(sim, values, queryVec); } diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 58ccf69406ff2..5388f942be8d7 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -59,10 +59,6 @@ org.apache.lucene.util.Version#parseLeniently(java.lang.String) org.apache.lucene.index.NoMergePolicy#INSTANCE @ explicit use of NoMergePolicy risks forgetting to configure NoMergeScheduler; use org.elasticsearch.common.lucene.Lucene#indexWriterConfigWithNoMerging() instead. 
-@defaultMessage Spawns a new thread which is solely under lucenes control use ThreadPool#relativeTimeInMillis instead -org.apache.lucene.search.TimeLimitingCollector#getGlobalTimerThread() -org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter() - @defaultMessage Don't interrupt threads use FutureUtils#cancel(Future) instead java.util.concurrent.Future#cancel(boolean) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 169c187ef115a..6bc3c2ad4d253 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.12.0 +lucene = 10.0.0 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index a523c3ec85ba1..f55d90933ed61 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -62,6 +62,9 @@ 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.setAsTypeCache 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.asTypeUncached +# Lucene 10: apply MADV_NORMAL advice to enable more aggressive readahead +-Dorg.apache.lucene.store.defaultReadAdvice=normal + ## heap dumps # generate a heap dump when an allocation from the Java heap fails; heap dumps diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index b65b974cd6b69..bdb0704fcd880 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.12.0 -:lucene_version_path: 9_12_0 +:lucene_version: 10.0.0 +:lucene_version_path: 10_0_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/changelog/113482.yaml b/docs/changelog/113482.yaml new file mode 100644 index 0000000000000..cb5823f0ccfcc --- /dev/null +++ b/docs/changelog/113482.yaml @@ -0,0 +1,27 @@ +pr: 113482 +summary: The 
'persian' analyzer has stemmer by default +area: Analysis +type: breaking +issues: +- 113050 +breaking: + title: The 'persian' analyzer has stemmer by default + area: Analysis + details: >- + Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch + exposes as 'persian' analyzer. Existing indices will keep the old + non-stemming behaviour while new indices will see the updated behaviour with + added stemming. + Users that wish to maintain the non-stemming behaviour need to define their + own analyzer as outlined in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. + Users that wish to use the new stemming behaviour for existing indices will + have to reindex their data. + impact: >- + Indexing with the 'persian' analyzer will produce slightly different tokens. + Users should check if this impacts their search results. If they wish to + maintain the legacy non-stemming behaviour they can define their own + analyzer equivalent as explained in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. + notable: false + diff --git a/docs/changelog/113614.yaml b/docs/changelog/113614.yaml new file mode 100644 index 0000000000000..bd9dcb3e38772 --- /dev/null +++ b/docs/changelog/113614.yaml @@ -0,0 +1,18 @@ +pr: 113614 +summary: The 'german2' stemmer is now an alias for the 'german' snowball stemmer +area: Analysis +type: breaking +issues: [] +breaking: + title: The "german2" snowball stemmer is now an alias for the "german" stemmer + area: Analysis + details: >- + Lucene 10 has merged the improved "german2" snowball language stemmer with the + "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for + "german". This may results in slightly different tokens being generated for + terms with umlaut substitution (like "ue" for "ü" etc...) 
+ impact: >- + Replace usages of "german2" with "german" in analysis configuration. Old + indices that use the "german" stemmer should be reindexed if possible. + notable: false + diff --git a/docs/changelog/114124.yaml b/docs/changelog/114124.yaml new file mode 100644 index 0000000000000..c812c6a468902 --- /dev/null +++ b/docs/changelog/114124.yaml @@ -0,0 +1,18 @@ +pr: 114124 +summary: The Korean dictionary for Nori has been updated +area: Analysis +type: breaking +issues: [] +breaking: + title: The Korean dictionary for Nori has been updated + area: Analysis + details: >- + Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). + For details see https://github.com/apache/lucene/issues/11452. Users + experiencing changes in search behaviour on existing data are advised to + reindex. + impact: >- + The change is small and should generally provide better analysis results. + Existing indices for full-text use cases should be reindexed though. + notable: false + diff --git a/docs/changelog/114146.yaml b/docs/changelog/114146.yaml new file mode 100644 index 0000000000000..be2096a64105c --- /dev/null +++ b/docs/changelog/114146.yaml @@ -0,0 +1,20 @@ +pr: 114146 +summary: Snowball stemmers have been upgraded +area: Analysis +type: breaking +issues: [] +breaking: + title: Snowball stemmers have been upgraded + area: Analysis + details: >- + Lucene 10 ships with an upgrade of its Snowball stemmers. + For details see https://github.com/apache/lucene/issues/13209. Users using + Snowball stemmers that are experiencing changes in search behaviour on + existing data are advised to reindex. + impact: >- + The upgrade should generally provide improved stemming results. Small changes + in token analysis can lead to mismatches with previously index data, so + existing indices using Snowball stemmers as part of their analysis chain + should be reindexed. 
+ notable: false + diff --git a/docs/changelog/114741.yaml b/docs/changelog/114741.yaml new file mode 100644 index 0000000000000..ae45c183cddf9 --- /dev/null +++ b/docs/changelog/114741.yaml @@ -0,0 +1,5 @@ +pr: 114741 +summary: Upgrade to Lucene 10 +area: Search +type: upgrade +issues: [] diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc index 02980a4ed8a8c..0d3e76f71d238 100644 --- a/docs/plugins/analysis-nori.asciidoc +++ b/docs/plugins/analysis-nori.asciidoc @@ -244,11 +244,11 @@ Which responds with: "end_offset": 3, "type": "word", "position": 1, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JKS(Subject case marker)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JKS(Subject case marker)" }, { "token": "깊", @@ -268,11 +268,11 @@ Which responds with: "end_offset": 6, "type": "word", "position": 3, - "leftPOS": "E(Verbal endings)", + "leftPOS": "ETM(Adnominal form transformative ending)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "E(Verbal endings)" + "rightPOS": "ETM(Adnominal form transformative ending)" }, { "token": "나무", @@ -292,11 +292,11 @@ Which responds with: "end_offset": 10, "type": "word", "position": 5, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JX(Auxiliary postpositional particle)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JX(Auxiliary postpositional particle)" } ] }, diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 5273537389e3d..881970787f5a6 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -1430,7 +1430,8 @@ PUT /persian_example "decimal_digit", "arabic_normalization", "persian_normalization", - "persian_stop" + "persian_stop", + "persian_stem" ] } } diff --git 
a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index 4cd088935af19..d9e2120afe6d1 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -173,7 +173,6 @@ http://bvg.udc.es/recursos_lingua/stemming.jsp[`minimal_galician`] (Plural step German:: https://dl.acm.org/citation.cfm?id=1141523[*`light_german`*], https://snowballstem.org/algorithms/german/stemmer.html[`german`], -https://snowballstem.org/algorithms/german2/stemmer.html[`german2`], http://members.unine.ch/jacques.savoy/clef/morpho.pdf[`minimal_german`] Greek:: diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index 2cf01b77d57ab..5f98807387280 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -40,14 +40,14 @@ POST _analyze "start_offset": 0, "end_offset": 8, "type": "word", - "position": 0 + "position": 1 }, { "token": "/one/two/three", "start_offset": 0, "end_offset": 14, "type": "word", - "position": 0 + "position": 2 } ] } @@ -144,14 +144,14 @@ POST my-index-000001/_analyze "start_offset": 7, "end_offset": 18, "type": "word", - "position": 0 + "position": 1 }, { "token": "/three/four/five", "start_offset": 7, "end_offset": 23, "type": "word", - "position": 0 + "position": 2 } ] } @@ -178,14 +178,14 @@ If we were to set `reverse` to `true`, it would produce the following: [[analysis-pathhierarchy-tokenizer-detailed-examples]] === Detailed examples -A common use-case for the `path_hierarchy` tokenizer is filtering results by -file paths. 
If indexing a file path along with the data, the use of the -`path_hierarchy` tokenizer to analyze the path allows filtering the results +A common use-case for the `path_hierarchy` tokenizer is filtering results by +file paths. If indexing a file path along with the data, the use of the +`path_hierarchy` tokenizer to analyze the path allows filtering the results by different parts of the file path string. This example configures an index to have two custom analyzers and applies -those analyzers to multifields of the `file_path` text field that will +those analyzers to multifields of the `file_path` text field that will store filenames. One of the two analyzers uses reverse tokenization. Some sample documents are then indexed to represent some file paths for photos inside photo folders of two different users. @@ -264,8 +264,8 @@ POST file-path-test/_doc/5 -------------------------------------------------- -A search for a particular file path string against the text field matches all -the example documents, with Bob's documents ranking highest due to `bob` also +A search for a particular file path string against the text field matches all +the example documents, with Bob's documents ranking highest due to `bob` also being one of the terms created by the standard analyzer boosting relevance for Bob's documents. @@ -301,7 +301,7 @@ GET file-path-test/_search With the reverse parameter for this tokenizer, it's also possible to match from the other end of the file path, such as individual file names or a deep level subdirectory. The following example shows a search for all files named -`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field +`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field configured to use the reverse parameter in the mapping. 
@@ -342,7 +342,7 @@ POST file-path-test/_analyze It's also useful to be able to filter with file paths when combined with other -types of searches, such as this example looking for any files paths with `16` +types of searches, such as this example looking for any files paths with `16` that also must be in Alice's photo directory. [source,console] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 3fed14231808c..5f1a0ccfdd6b4 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1298,7 +1298,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "query" : [ { "type" : "DocAndScoreQuery", - "description" : "DocAndScore[100]", + "description" : "DocAndScoreQuery[0,...][0.008961825,...],0.008961825", "time_in_nanos" : 444414, "breakdown" : { "set_min_competitive_score_count" : 0, diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0156f13b4b05d..4d9b96184d07a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2824,129 +2824,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java index e2aea6b3ebd9f..4ed60b2f5e8b2 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import 
org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -39,7 +39,7 @@ static Optional instance() { Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ); @@ -52,9 +52,5 @@ Optional getInt7SQVectorScorerSupplier( * @param queryVector the query vector * @return an optional containing the vector scorer, or empty */ - Optional getInt7SQVectorScorer( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ); + Optional getInt7SQVectorScorer(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector); } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a22d787980252..6248902c32e7a 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -25,7 +25,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { throw new UnsupportedOperationException("should not reach here"); @@ -34,7 +34,7 
@@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { throw new UnsupportedOperationException("should not reach here"); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a65fe582087d9..a863d9e3448ca 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.simdvec.internal.Int7SQVectorScorer; import org.elasticsearch.simdvec.internal.Int7SQVectorScorerSupplier.DotProductSupplier; @@ -38,7 +38,7 @@ private VectorScorerFactoryImpl() {} public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { input = FilterIndexInput.unwrapOnlyTest(input); @@ -57,7 +57,7 @@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { return Int7SQVectorScorer.create(sim, values, queryVector); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java 
b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index 0b41436ce2242..e02df124ad0f0 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -11,18 +11,14 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; public final class Int7SQVectorScorer { // Unconditionally returns an empty optional on <= JDK 21, since the scorer is only supported on JDK 22+ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { return Optional.empty(); } diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java index f6d874cd3e728..198e10406056e 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java @@ -12,7 +12,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; import java.io.IOException; @@ -31,12 +31,12 @@ public 
abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS final int maxOrd; final float scoreCorrectionConstant; final MemorySegmentAccessInput input; - final RandomAccessQuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds + final QuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds final ScalarQuantizedVectorSimilarity fallbackScorer; protected Int7SQVectorScorerSupplier( MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant, ScalarQuantizedVectorSimilarity fallbackScorer ) { @@ -104,11 +104,7 @@ public float score(int node) throws IOException { public static final class EuclideanSupplier extends Int7SQVectorScorerSupplier { - public EuclideanSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public EuclideanSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant, BITS)); } @@ -127,11 +123,7 @@ public EuclideanSupplier copy() { public static final class DotProductSupplier extends Int7SQVectorScorerSupplier { - public DotProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public DotProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(DOT_PRODUCT, scoreCorrectionConstant, BITS)); } @@ -151,11 +143,7 @@ public DotProductSupplier copy() { public static final class MaxInnerProductSupplier extends Int7SQVectorScorerSupplier { - public MaxInnerProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public 
MaxInnerProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant, BITS)); } diff --git a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index c9659ea1af9a8..3d0e1e71a3744 100644 --- a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -35,11 +35,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr byte[] scratch; /** Return an optional whose value, if present, is the scorer. Otherwise, an empty optional is returned. 
*/ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { checkDimensions(queryVector.length, values.dimension()); var input = values.getSlice(); if (input == null) { @@ -63,12 +59,7 @@ public static Optional create( }; } - Int7SQVectorScorer( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - byte[] queryVector, - float queryCorrection - ) { + Int7SQVectorScorer(MemorySegmentAccessInput input, QuantizedByteVectorValues values, byte[] queryVector, float queryCorrection) { super(values); this.input = input; assert queryVector.length == values.getVectorByteLength(); @@ -105,7 +96,7 @@ final void checkOrdinal(int ord) { } public static final class DotProductScorer extends Int7SQVectorScorer { - public DotProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public DotProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -122,7 +113,7 @@ public float score(int node) throws IOException { } public static final class EuclideanScorer extends Int7SQVectorScorer { - public EuclideanScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public EuclideanScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -136,7 +127,7 @@ public float score(int node) throws IOException { } public static final class MaxInnerProductScorer extends Int7SQVectorScorer { - public MaxInnerProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float corr) { + public 
MaxInnerProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float corr) { super(in, values, query, corr); } diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java index db57dc936e794..0f967127f6f2c 100644 --- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -431,14 +431,13 @@ public Optional call() { } } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 
b16c6eaaaa1d1..f4f7e787d2b7b 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -33,3 +33,7 @@ dependencies { artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } + +tasks.named("yamlRestCompatTestTransform").configure { task -> + task.replaceValueInMatch("tokens.0.token", "absenț", "romanian") +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java new file mode 100644 index 0000000000000..0eb8d916307ae --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java @@ -0,0 +1,741 @@ +/* + * @notice + * Generated by Snowball 2.0.0 - https://snowballstem.org/ + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ + +package org.elasticsearch.analysis.common; + +import org.tartarus.snowball.Among; + +/** +* This class implements the stemming algorithm defined by a snowball script. 
+* NOTE: This is the RomanianStemmer used in Lucene 9 and should only be used for backwards compatibility +*/ +@SuppressWarnings("checkstyle:DescendantToken") +class LegacyRomanianStemmer extends org.tartarus.snowball.SnowballStemmer { + + private static final java.lang.invoke.MethodHandles.Lookup methodObject = java.lang.invoke.MethodHandles.lookup(); + + private static final Among a_0[] = { new Among("", -1, 3), new Among("I", 0, 1), new Among("U", 0, 2) }; + + private static final Among a_1[] = { + new Among("ea", -1, 3), + new Among("a\u0163ia", -1, 7), + new Among("aua", -1, 2), + new Among("iua", -1, 4), + new Among("a\u0163ie", -1, 7), + new Among("ele", -1, 3), + new Among("ile", -1, 5), + new Among("iile", 6, 4), + new Among("iei", -1, 4), + new Among("atei", -1, 6), + new Among("ii", -1, 4), + new Among("ului", -1, 1), + new Among("ul", -1, 1), + new Among("elor", -1, 3), + new Among("ilor", -1, 4), + new Among("iilor", 14, 4) }; + + private static final Among a_2[] = { + new Among("icala", -1, 4), + new Among("iciva", -1, 4), + new Among("ativa", -1, 5), + new Among("itiva", -1, 6), + new Among("icale", -1, 4), + new Among("a\u0163iune", -1, 5), + new Among("i\u0163iune", -1, 6), + new Among("atoare", -1, 5), + new Among("itoare", -1, 6), + new Among("\u0103toare", -1, 5), + new Among("icitate", -1, 4), + new Among("abilitate", -1, 1), + new Among("ibilitate", -1, 2), + new Among("ivitate", -1, 3), + new Among("icive", -1, 4), + new Among("ative", -1, 5), + new Among("itive", -1, 6), + new Among("icali", -1, 4), + new Among("atori", -1, 5), + new Among("icatori", 18, 4), + new Among("itori", -1, 6), + new Among("\u0103tori", -1, 5), + new Among("icitati", -1, 4), + new Among("abilitati", -1, 1), + new Among("ivitati", -1, 3), + new Among("icivi", -1, 4), + new Among("ativi", -1, 5), + new Among("itivi", -1, 6), + new Among("icit\u0103i", -1, 4), + new Among("abilit\u0103i", -1, 1), + new Among("ivit\u0103i", -1, 3), + new Among("icit\u0103\u0163i", -1, 
4), + new Among("abilit\u0103\u0163i", -1, 1), + new Among("ivit\u0103\u0163i", -1, 3), + new Among("ical", -1, 4), + new Among("ator", -1, 5), + new Among("icator", 35, 4), + new Among("itor", -1, 6), + new Among("\u0103tor", -1, 5), + new Among("iciv", -1, 4), + new Among("ativ", -1, 5), + new Among("itiv", -1, 6), + new Among("ical\u0103", -1, 4), + new Among("iciv\u0103", -1, 4), + new Among("ativ\u0103", -1, 5), + new Among("itiv\u0103", -1, 6) }; + + private static final Among a_3[] = { + new Among("ica", -1, 1), + new Among("abila", -1, 1), + new Among("ibila", -1, 1), + new Among("oasa", -1, 1), + new Among("ata", -1, 1), + new Among("ita", -1, 1), + new Among("anta", -1, 1), + new Among("ista", -1, 3), + new Among("uta", -1, 1), + new Among("iva", -1, 1), + new Among("ic", -1, 1), + new Among("ice", -1, 1), + new Among("abile", -1, 1), + new Among("ibile", -1, 1), + new Among("isme", -1, 3), + new Among("iune", -1, 2), + new Among("oase", -1, 1), + new Among("ate", -1, 1), + new Among("itate", 17, 1), + new Among("ite", -1, 1), + new Among("ante", -1, 1), + new Among("iste", -1, 3), + new Among("ute", -1, 1), + new Among("ive", -1, 1), + new Among("ici", -1, 1), + new Among("abili", -1, 1), + new Among("ibili", -1, 1), + new Among("iuni", -1, 2), + new Among("atori", -1, 1), + new Among("osi", -1, 1), + new Among("ati", -1, 1), + new Among("itati", 30, 1), + new Among("iti", -1, 1), + new Among("anti", -1, 1), + new Among("isti", -1, 3), + new Among("uti", -1, 1), + new Among("i\u015Fti", -1, 3), + new Among("ivi", -1, 1), + new Among("it\u0103i", -1, 1), + new Among("o\u015Fi", -1, 1), + new Among("it\u0103\u0163i", -1, 1), + new Among("abil", -1, 1), + new Among("ibil", -1, 1), + new Among("ism", -1, 3), + new Among("ator", -1, 1), + new Among("os", -1, 1), + new Among("at", -1, 1), + new Among("it", -1, 1), + new Among("ant", -1, 1), + new Among("ist", -1, 3), + new Among("ut", -1, 1), + new Among("iv", -1, 1), + new Among("ic\u0103", -1, 1), + new 
Among("abil\u0103", -1, 1), + new Among("ibil\u0103", -1, 1), + new Among("oas\u0103", -1, 1), + new Among("at\u0103", -1, 1), + new Among("it\u0103", -1, 1), + new Among("ant\u0103", -1, 1), + new Among("ist\u0103", -1, 3), + new Among("ut\u0103", -1, 1), + new Among("iv\u0103", -1, 1) }; + + private static final Among a_4[] = { + new Among("ea", -1, 1), + new Among("ia", -1, 1), + new Among("esc", -1, 1), + new Among("\u0103sc", -1, 1), + new Among("ind", -1, 1), + new Among("\u00E2nd", -1, 1), + new Among("are", -1, 1), + new Among("ere", -1, 1), + new Among("ire", -1, 1), + new Among("\u00E2re", -1, 1), + new Among("se", -1, 2), + new Among("ase", 10, 1), + new Among("sese", 10, 2), + new Among("ise", 10, 1), + new Among("use", 10, 1), + new Among("\u00E2se", 10, 1), + new Among("e\u015Fte", -1, 1), + new Among("\u0103\u015Fte", -1, 1), + new Among("eze", -1, 1), + new Among("ai", -1, 1), + new Among("eai", 19, 1), + new Among("iai", 19, 1), + new Among("sei", -1, 2), + new Among("e\u015Fti", -1, 1), + new Among("\u0103\u015Fti", -1, 1), + new Among("ui", -1, 1), + new Among("ezi", -1, 1), + new Among("\u00E2i", -1, 1), + new Among("a\u015Fi", -1, 1), + new Among("se\u015Fi", -1, 2), + new Among("ase\u015Fi", 29, 1), + new Among("sese\u015Fi", 29, 2), + new Among("ise\u015Fi", 29, 1), + new Among("use\u015Fi", 29, 1), + new Among("\u00E2se\u015Fi", 29, 1), + new Among("i\u015Fi", -1, 1), + new Among("u\u015Fi", -1, 1), + new Among("\u00E2\u015Fi", -1, 1), + new Among("a\u0163i", -1, 2), + new Among("ea\u0163i", 38, 1), + new Among("ia\u0163i", 38, 1), + new Among("e\u0163i", -1, 2), + new Among("i\u0163i", -1, 2), + new Among("\u00E2\u0163i", -1, 2), + new Among("ar\u0103\u0163i", -1, 1), + new Among("ser\u0103\u0163i", -1, 2), + new Among("aser\u0103\u0163i", 45, 1), + new Among("seser\u0103\u0163i", 45, 2), + new Among("iser\u0103\u0163i", 45, 1), + new Among("user\u0103\u0163i", 45, 1), + new Among("\u00E2ser\u0103\u0163i", 45, 1), + new 
Among("ir\u0103\u0163i", -1, 1), + new Among("ur\u0103\u0163i", -1, 1), + new Among("\u00E2r\u0103\u0163i", -1, 1), + new Among("am", -1, 1), + new Among("eam", 54, 1), + new Among("iam", 54, 1), + new Among("em", -1, 2), + new Among("asem", 57, 1), + new Among("sesem", 57, 2), + new Among("isem", 57, 1), + new Among("usem", 57, 1), + new Among("\u00E2sem", 57, 1), + new Among("im", -1, 2), + new Among("\u00E2m", -1, 2), + new Among("\u0103m", -1, 2), + new Among("ar\u0103m", 65, 1), + new Among("ser\u0103m", 65, 2), + new Among("aser\u0103m", 67, 1), + new Among("seser\u0103m", 67, 2), + new Among("iser\u0103m", 67, 1), + new Among("user\u0103m", 67, 1), + new Among("\u00E2ser\u0103m", 67, 1), + new Among("ir\u0103m", 65, 1), + new Among("ur\u0103m", 65, 1), + new Among("\u00E2r\u0103m", 65, 1), + new Among("au", -1, 1), + new Among("eau", 76, 1), + new Among("iau", 76, 1), + new Among("indu", -1, 1), + new Among("\u00E2ndu", -1, 1), + new Among("ez", -1, 1), + new Among("easc\u0103", -1, 1), + new Among("ar\u0103", -1, 1), + new Among("ser\u0103", -1, 2), + new Among("aser\u0103", 84, 1), + new Among("seser\u0103", 84, 2), + new Among("iser\u0103", 84, 1), + new Among("user\u0103", 84, 1), + new Among("\u00E2ser\u0103", 84, 1), + new Among("ir\u0103", -1, 1), + new Among("ur\u0103", -1, 1), + new Among("\u00E2r\u0103", -1, 1), + new Among("eaz\u0103", -1, 1) }; + + private static final Among a_5[] = { + new Among("a", -1, 1), + new Among("e", -1, 1), + new Among("ie", 1, 1), + new Among("i", -1, 1), + new Among("\u0103", -1, 1) }; + + private static final char g_v[] = { 17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 32, 0, 0, 4 }; + + private boolean B_standard_suffix_removed; + private int I_p2; + private int I_p1; + private int I_pV; + + private boolean r_prelude() { + while (true) { + int v_1 = cursor; + lab0: { + golab1: while (true) { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + bra = cursor; + lab3: { + int 
v_3 = cursor; + lab4: { + if (!(eq_s("u"))) { + break lab4; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab4; + } + slice_from("U"); + break lab3; + } + cursor = v_3; + if (!(eq_s("i"))) { + break lab2; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + slice_from("I"); + } + cursor = v_2; + break golab1; + } + cursor = v_2; + if (cursor >= limit) { + break lab0; + } + cursor++; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_mark_regions() { + I_pV = limit; + I_p1 = limit; + I_p2 = limit; + int v_1 = cursor; + lab0: { + lab1: { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + lab3: { + int v_3 = cursor; + lab4: { + if (!(out_grouping(g_v, 97, 259))) { + break lab4; + } + golab5: while (true) { + lab6: { + if (!(in_grouping(g_v, 97, 259))) { + break lab6; + } + break golab5; + } + if (cursor >= limit) { + break lab4; + } + cursor++; + } + break lab3; + } + cursor = v_3; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + golab7: while (true) { + lab8: { + if (!(out_grouping(g_v, 97, 259))) { + break lab8; + } + break golab7; + } + if (cursor >= limit) { + break lab2; + } + cursor++; + } + } + break lab1; + } + cursor = v_2; + if (!(out_grouping(g_v, 97, 259))) { + break lab0; + } + lab9: { + int v_6 = cursor; + lab10: { + if (!(out_grouping(g_v, 97, 259))) { + break lab10; + } + golab11: while (true) { + lab12: { + if (!(in_grouping(g_v, 97, 259))) { + break lab12; + } + break golab11; + } + if (cursor >= limit) { + break lab10; + } + cursor++; + } + break lab9; + } + cursor = v_6; + if (!(in_grouping(g_v, 97, 259))) { + break lab0; + } + if (cursor >= limit) { + break lab0; + } + cursor++; + } + } + I_pV = cursor; + } + cursor = v_1; + int v_8 = cursor; + lab13: { + golab14: while (true) { + lab15: { + if (!(in_grouping(g_v, 97, 259))) { + break lab15; + } + break golab14; + } + if (cursor >= limit) { + break lab13; + } + 
cursor++; + } + golab16: while (true) { + lab17: { + if (!(out_grouping(g_v, 97, 259))) { + break lab17; + } + break golab16; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p1 = cursor; + golab18: while (true) { + lab19: { + if (!(in_grouping(g_v, 97, 259))) { + break lab19; + } + break golab18; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + golab20: while (true) { + lab21: { + if (!(out_grouping(g_v, 97, 259))) { + break lab21; + } + break golab20; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p2 = cursor; + } + cursor = v_8; + return true; + } + + private boolean r_postlude() { + int among_var; + while (true) { + int v_1 = cursor; + lab0: { + bra = cursor; + among_var = find_among(a_0); + if (among_var == 0) { + break lab0; + } + ket = cursor; + switch (among_var) { + case 1: + slice_from("i"); + break; + case 2: + slice_from("u"); + break; + case 3: + if (cursor >= limit) { + break lab0; + } + cursor++; + break; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_RV() { + if (!(I_pV <= cursor)) { + return false; + } + return true; + } + + private boolean r_R1() { + if (!(I_p1 <= cursor)) { + return false; + } + return true; + } + + private boolean r_R2() { + if (!(I_p2 <= cursor)) { + return false; + } + return true; + } + + private boolean r_step_0() { + int among_var; + ket = cursor; + among_var = find_among_b(a_1); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + slice_from("a"); + break; + case 3: + slice_from("e"); + break; + case 4: + slice_from("i"); + break; + case 5: { + int v_1 = limit - cursor; + lab0: { + if (!(eq_s_b("ab"))) { + break lab0; + } + return false; + } + cursor = limit - v_1; + } + slice_from("i"); + break; + case 6: + slice_from("at"); + break; + case 7: + slice_from("a\u0163i"); + break; + } + return true; + } + + private 
boolean r_combo_suffix() { + int among_var; + int v_1 = limit - cursor; + ket = cursor; + among_var = find_among_b(a_2); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_from("abil"); + break; + case 2: + slice_from("ibil"); + break; + case 3: + slice_from("iv"); + break; + case 4: + slice_from("ic"); + break; + case 5: + slice_from("at"); + break; + case 6: + slice_from("it"); + break; + } + B_standard_suffix_removed = true; + cursor = limit - v_1; + return true; + } + + private boolean r_standard_suffix() { + int among_var; + B_standard_suffix_removed = false; + while (true) { + int v_1 = limit - cursor; + lab0: { + if (!r_combo_suffix()) { + break lab0; + } + continue; + } + cursor = limit - v_1; + break; + } + ket = cursor; + among_var = find_among_b(a_3); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R2()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + if (!(eq_s_b("\u0163"))) { + return false; + } + bra = cursor; + slice_from("t"); + break; + case 3: + slice_from("ist"); + break; + } + B_standard_suffix_removed = true; + return true; + } + + private boolean r_verb_suffix() { + int among_var; + if (cursor < I_pV) { + return false; + } + int v_2 = limit_backward; + limit_backward = I_pV; + ket = cursor; + among_var = find_among_b(a_4); + if (among_var == 0) { + limit_backward = v_2; + return false; + } + bra = cursor; + switch (among_var) { + case 1: + lab0: { + int v_3 = limit - cursor; + lab1: { + if (!(out_grouping_b(g_v, 97, 259))) { + break lab1; + } + break lab0; + } + cursor = limit - v_3; + if (!(eq_s_b("u"))) { + limit_backward = v_2; + return false; + } + } + slice_del(); + break; + case 2: + slice_del(); + break; + } + limit_backward = v_2; + return true; + } + + private boolean r_vowel_suffix() { + ket = cursor; + if (find_among_b(a_5) == 0) { + return false; + } + bra = cursor; + if (!r_RV()) { 
+ return false; + } + slice_del(); + return true; + } + + @Override + public boolean stem() { + int v_1 = cursor; + r_prelude(); + cursor = v_1; + r_mark_regions(); + limit_backward = cursor; + cursor = limit; + int v_3 = limit - cursor; + r_step_0(); + cursor = limit - v_3; + int v_4 = limit - cursor; + r_standard_suffix(); + cursor = limit - v_4; + int v_5 = limit - cursor; + lab0: { + lab1: { + int v_6 = limit - cursor; + lab2: { + if (!(B_standard_suffix_removed)) { + break lab2; + } + break lab1; + } + cursor = limit - v_6; + if (!r_verb_suffix()) { + break lab0; + } + } + } + cursor = limit - v_5; + int v_7 = limit - cursor; + r_vowel_suffix(); + cursor = limit - v_7; + cursor = limit_backward; + int v_8 = cursor; + r_postlude(); + cursor = v_8; + return true; + } + + @Override + public boolean equals(Object o) { + return o instanceof LegacyRomanianStemmer; + } + + @Override + public int hashCode() { + return LegacyRomanianStemmer.class.getName().hashCode(); + } +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java index 9ea3a9fa4eee9..917a45188123c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java @@ -9,24 +9,72 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.LowerCaseFilter; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; +import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.fa.PersianAnalyzer; 
+import org.apache.lucene.analysis.fa.PersianCharFilter; +import org.apache.lucene.analysis.fa.PersianNormalizationFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +import java.io.Reader; - private final PersianAnalyzer analyzer; +public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { + + private final StopwordAnalyzerBase analyzer; PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10 this analyzer contains stemming by default + analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + } else { + // for older index versions we need the old analyzer behaviour without stemming + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ + result = new PersianNormalizationFilter(result); + /* + * the order here is important: the stopword list is normalized with the + * above! 
+ */ + return new TokenStreamComponents(source, new StopFilter(result, stopwords)); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + TokenStream result = new LowerCaseFilter(in); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ + result = new PersianNormalizationFilter(result); + return result; + } + + protected Reader initReader(String fieldName, Reader reader) { + return new PersianCharFilter(reader); + } + }; + } } @Override - public PersianAnalyzer get() { + public StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java index cf33a38abd634..6c28df83a6d36 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java @@ -9,28 +9,60 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.LowerCaseFilter; +import org.apache.lucene.analysis.core.StopFilter; +import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; import org.apache.lucene.analysis.ro.RomanianAnalyzer; +import org.apache.lucene.analysis.snowball.SnowballFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import 
org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { - private final RomanianAnalyzer analyzer; + private final StopwordAnalyzerBase analyzer; RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new RomanianAnalyzer( - Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()), - Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET) - ); + CharArraySet stopwords = Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()); + CharArraySet stemExclusionSet = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10, this analyzer uses a modern unicode form and normalizes cedilla forms to forms with commas + analyzer = new RomanianAnalyzer(stopwords, stemExclusionSet); + } else { + // for older index versions we need the old behaviour without normalization + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new StopFilter(result, stopwords); + if (stemExclusionSet.isEmpty() == false) { + result = new SetKeywordMarkerFilter(result, stemExclusionSet); + } + result = new SnowballFilter(result, new LegacyRomanianStemmer()); + return new TokenStreamComponents(source, result); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + return new LowerCaseFilter(in); + } + }; + + } } @Override - public RomanianAnalyzer get() { + public
StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 1c71c64311517..7548c8ad2b88b 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -9,6 +9,7 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicStemFilter; import org.apache.lucene.analysis.bg.BulgarianStemFilter; @@ -38,8 +39,9 @@ import org.apache.lucene.analysis.lv.LatvianStemFilter; import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream; import org.apache.lucene.analysis.no.NorwegianLightStemFilter; -import org.apache.lucene.analysis.no.NorwegianLightStemmer; +import org.apache.lucene.analysis.no.NorwegianLightStemFilterFactory; import org.apache.lucene.analysis.no.NorwegianMinimalStemFilter; +import org.apache.lucene.analysis.no.NorwegianMinimalStemFilterFactory; import org.apache.lucene.analysis.pt.PortugueseLightStemFilter; import org.apache.lucene.analysis.pt.PortugueseMinimalStemFilter; import org.apache.lucene.analysis.pt.PortugueseStemFilter; @@ -62,14 +64,11 @@ import org.tartarus.snowball.ext.EstonianStemmer; import org.tartarus.snowball.ext.FinnishStemmer; import org.tartarus.snowball.ext.FrenchStemmer; -import org.tartarus.snowball.ext.German2Stemmer; import org.tartarus.snowball.ext.GermanStemmer; import org.tartarus.snowball.ext.HungarianStemmer; import org.tartarus.snowball.ext.IrishStemmer; import org.tartarus.snowball.ext.ItalianStemmer; -import org.tartarus.snowball.ext.KpStemmer; import org.tartarus.snowball.ext.LithuanianStemmer; -import 
org.tartarus.snowball.ext.LovinsStemmer; import org.tartarus.snowball.ext.NorwegianStemmer; import org.tartarus.snowball.ext.PortugueseStemmer; import org.tartarus.snowball.ext.RomanianStemmer; @@ -80,6 +79,7 @@ import org.tartarus.snowball.ext.TurkishStemmer; import java.io.IOException; +import java.util.Collections; public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { @@ -87,27 +87,15 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { private static final TokenStream EMPTY_TOKEN_STREAM = new EmptyTokenStream(); - private String language; + private final String language; + + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(StemmerTokenFilterFactory.class); StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { super(name, settings); this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter"))); // check that we have a valid language by trying to create a TokenStream create(EMPTY_TOKEN_STREAM).close(); - if ("lovins".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "lovins_deprecation", - "The [lovins] stemmer is deprecated and will be removed in a future version." - ); - } - if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "dutch_kp_deprecation", - "The [dutch_kp] stemmer is deprecated and will be removed in a future version." 
- ); - } } @Override @@ -135,8 +123,17 @@ public TokenStream create(TokenStream tokenStream) { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new KpStemmer()); - + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "dutch_kp_deprecation", + "The [dutch_kp] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; // English stemmers } else if ("english".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -145,7 +142,17 @@ public TokenStream create(TokenStream tokenStream) { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new LovinsStemmer()); + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "lovins_deprecation", + "The [lovins] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { @@ -185,7 +192,13 @@ public TokenStream create(TokenStream tokenStream) { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new German2Stemmer()); + DEPRECATION_LOGGER.critical( + DeprecationCategory.ANALYSIS, + "german2_stemmer_deprecation", + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. 
" + + "Replace all usages of 'german2' with 'german'." + ); + return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); } else if ("minimal_german".equalsIgnoreCase(language) || "minimalGerman".equalsIgnoreCase(language)) { @@ -231,10 +244,13 @@ public TokenStream create(TokenStream tokenStream) { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { - return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); + NorwegianLightStemFilterFactory factory = new NorwegianLightStemFilterFactory(Collections.singletonMap("variant", "nn")); + return factory.create(tokenStream); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { - return new NorwegianMinimalStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); - + NorwegianMinimalStemFilterFactory factory = new NorwegianMinimalStemFilterFactory( + Collections.singletonMap("variant", "nn") + ); + return factory.create(tokenStream); // Persian stemmers } else if ("persian".equalsIgnoreCase(language)) { return new PersianStemFilter(tokenStream); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index b406fa8335779..0d936666e92cd 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -278,7 +278,7 @@ public void testPhrasePrefix() throws IOException { boolQuery().should(matchPhrasePrefixQuery("field1", "test")).should(matchPhrasePrefixQuery("field1", "bro")) 
).highlighter(highlight().field("field1").order("score").preTags("").postTags("")), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); for (int i = 0; i < 2; i++) { assertHighlight( resp, diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java new file mode 100644 index 0000000000000..7b962538c2a10 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Tests Persian Analyzer factory and behavioural changes with Lucene 10 + */ +public class PersianAnalyzerProviderTests extends ESTokenStreamTestCase { + + public void testPersianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زياد", "خوانده" }); + } + + public void testPersianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + 
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زيادي", "خوانده" }); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java new file mode 100644 index 0000000000000..1af44bc71f35d --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Verifies the behavior of Romanian analyzer. + */ +public class RomanianAnalyzerTests extends ESTokenStreamTestCase { + + public void testRomanianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenț" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoștinț" }); + } + + public void testRomanianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + 
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenţ" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoştinţ" }); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java index 8f3d52f0174c6..bb06c221873b5 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; @@ -16,6 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -103,6 +105,42 @@ public void 
testMultipleLanguagesThrowsException() throws IOException { assertEquals("Invalid stemmer class specified: [english, light_english]", e.getMessage()); } + public void testGermanAndGerman2Stemmer() throws IOException { + IndexVersion v = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.UPGRADE_TO_LUCENE_10_0_0, IndexVersion.current()); + Analyzer analyzer = createGermanStemmer("german", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + + analyzer = createGermanStemmer("german2", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + assertWarnings( + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. " + + "Replace all usages of 'german2' with 'german'." + ); + } + + private static Analyzer createGermanStemmer(String variant, IndexVersion v) throws IOException { + + Settings settings = Settings.builder() + .put("index.analysis.filter.my_german.type", "stemmer") + .put("index.analysis.filter.my_german.language", variant) + .put("index.analysis.analyzer.my_german.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_german.filter", "my_german") + .put(SETTING_VERSION_CREATED, v) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_german"); + assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class)); + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader("Buecher oder Bücher")); + TokenStream create = tokenFilter.create(tokenizer); + assertThat(create, instanceOf(SnowballFilter.class)); + IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers; + NamedAnalyzer analyzer = indexAnalyzers.get("my_german"); + return analyzer; + } + public void testKpDeprecation() throws IOException { IndexVersion v = 
IndexVersionUtils.randomVersion(random()); Settings settings = Settings.builder() diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index c03bdb3111050..8930e485aa249 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -901,6 +901,31 @@ - length: { tokens: 1 } - match: { tokens.0.token: خورد } +--- +"persian stemming": + - requires: + cluster_features: ["lucene_10_upgrade"] + reason: "test requires persian analyzer stemming capabilities that come with Lucene 10" + + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: persian + + - do: + indices.analyze: + index: test + body: + text: كتابها + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: كتاب } + --- "portuguese": - do: @@ -948,7 +973,7 @@ text: absenţa analyzer: romanian - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } - do: indices.analyze: @@ -957,7 +982,7 @@ text: absenţa analyzer: my_analyzer - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } --- "russian": diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 8f1c0cf515e14..cb74d62137815 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.automaton.Automata; import 
org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.Build; @@ -440,13 +439,13 @@ private static CharacterRunAutomaton buildAutomaton(List includePatterns ? includeAutomaton : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CharacterRunAutomaton(MinimizationOperations.minimize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); + return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton patternsToAutomaton(List patterns) { final List automata = patterns.stream().map(s -> { final String regex = s.replace(".", "\\.").replace("*", ".*"); - return new RegExp(regex).toAutomaton(); + return new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); }).toList(); if (automata.isEmpty()) { return null; diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 8e7ecfa49f144..777ddc28fefdc 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1706,7 +1706,7 @@ public void testSegmentsSortedOnTimestampDesc() throws Exception { assertResponse( prepareSearch("metrics-foo").addFetchField(new FieldAndFormat(DEFAULT_TIMESTAMP_FIELD, "epoch_millis")).setSize(totalDocs), resp -> { - assertEquals(totalDocs, resp.getHits().getTotalHits().value); + assertEquals(totalDocs, resp.getHits().getTotalHits().value()); SearchHit[] hits = resp.getHits().getHits(); 
assertEquals(totalDocs, hits.length); @@ -2027,7 +2027,7 @@ static void indexDocs(String dataStream, int numDocs) { static void verifyDocs(String dataStream, long expectedNumHits, List expectedIndices) { assertResponse(prepareSearch(dataStream).setSize((int) expectedNumHits), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); Arrays.stream(resp.getHits().getHits()).forEach(hit -> assertTrue(expectedIndices.contains(hit.getIndex()))); }); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 686e253d1d173..a2557a4de6e6d 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -545,7 +545,7 @@ public void testTrimId() throws Exception { var searchRequest = new SearchRequest(dataStreamName); searchRequest.source().trackTotalHits(true); assertResponse(client().search(searchRequest), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numBulkRequests * numDocsPerBulk)); String id = searchResponse.getHits().getHits()[0].getId(); assertThat(id, notNullValue()); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 6942cc3733d1e..f8c8d2bd359f3 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ 
b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -256,8 +256,8 @@ public void testGeoIpDatabasesDownload() throws Exception { res -> { try { TotalHits totalHits = res.getHits().getTotalHits(); - assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation); - assertEquals(size, totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation()); + assertEquals(size, totalHits.value()); assertEquals(size, res.getHits().getHits().length); List data = new ArrayList<>(); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java index 570c2a5f3783a..df6780aba7222 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java @@ -81,7 +81,7 @@ public void testBasic() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'] + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -91,7 +91,7 @@ public void testFunction() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertNoFailuresAndResponse(buildRequest("doc['foo'] + abs(1)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -102,7 +102,7 @@ public void testBasicUsingDotValue() throws Exception { 
prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'].value + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -125,7 +125,7 @@ public void testScore() throws Exception { assertResponse(req, rsp -> { assertNoFailures(rsp); SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals("1", hits.getAt(0).getId()); assertEquals("3", hits.getAt(1).getId()); assertEquals("2", hits.getAt(2).getId()); @@ -148,25 +148,25 @@ public void testDateMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getMonth() + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); 
assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -182,25 +182,25 @@ public void testDateObjectMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.year"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 
0.0D); @@ -238,7 +238,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -246,7 +246,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -254,7 +254,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -262,7 +262,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -270,7 +270,7 @@ public void testMultiValueMethods() throws 
Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -278,7 +278,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -286,7 +286,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -295,7 +295,7 @@ public void testMultiValueMethods() throws Exception { // make sure count() works for missing assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -304,7 +304,7 @@ public void testMultiValueMethods() throws Exception { // make sure .empty works in the same way 
assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 5.0 : 2.0"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -342,7 +342,7 @@ public void testSparseField() throws Exception { ); assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); @@ -378,7 +378,7 @@ public void testParams() throws Exception { String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)"; assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -501,7 +501,7 @@ public void testSpecialValueVariable() throws Exception { ); assertResponse(req, rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); Stats stats = rsp.getAggregations().get("int_agg"); assertEquals(39.0, stats.getMax(), 0.0001); @@ -655,22 +655,22 @@ public void testGeo() throws Exception { refresh(); // access .lat assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(61.5240, 
rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .lon assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // call haversin assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); }); } @@ -693,14 +693,14 @@ public void testBoolean() throws Exception { ); // access .value assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 
1 : 0"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -708,7 +708,7 @@ public void testBoolean() throws Exception { // ternary operator // vip's have a 50% discount assertNoFailuresAndResponse(buildRequest("doc['vip'] ? doc['price']/2 : doc['price']"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -727,7 +727,7 @@ public void testFilterScript() throws Exception { Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script))); assertNoFailuresAndResponse(builder, rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java index 0952ff8fe856f..bb714d4674ed6 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java @@ -17,6 +17,8 @@ import org.apache.lucene.search.SortField; import 
org.elasticsearch.script.DoubleValuesScript; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.function.Function; /** @@ -37,12 +39,20 @@ public DoubleValuesScript newInstance() { return new DoubleValuesScript() { @Override public double execute() { - return exprScript.evaluate(new DoubleValues[0]); + try { + return exprScript.evaluate(new DoubleValues[0]); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override public double evaluate(DoubleValues[] functionValues) { - return exprScript.evaluate(functionValues); + try { + return exprScript.evaluate(functionValues); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index b306f104d7ba5..58cd9ea293aef 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -24,7 +24,6 @@ import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; -import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.DoubleValuesScript; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.FilterScript; @@ -36,9 +35,8 @@ import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.search.lookup.SearchLookup; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.io.IOException; +import java.io.UncheckedIOException; import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; @@ -156,36 +154,14 @@ 
public String getType() { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { - // classloader created here - final SecurityManager sm = System.getSecurityManager(); SpecialPermission.check(); - Expression expr = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Expression run() { - try { - // snapshot our context here, we check on behalf of the expression - AccessControlContext engineContext = AccessController.getContext(); - ClassLoader loader = getClass().getClassLoader(); - if (sm != null) { - loader = new ClassLoader(loader) { - @Override - protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - try { - engineContext.checkPermission(new ClassPermission(name)); - } catch (SecurityException e) { - throw new ClassNotFoundException(name, e); - } - return super.loadClass(name, resolve); - } - }; - } - // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here - return JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS, loader); - } catch (ParseException e) { - throw convertToScriptException("compile error", scriptSource, scriptSource, e); - } - } - }); + Expression expr; + try { + // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here + expr = JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS); + } catch (ParseException e) { + throw convertToScriptException("compile error", scriptSource, scriptSource, e); + } if (contexts.containsKey(context) == false) { throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } @@ -233,7 +209,11 @@ public Double execute() { placeholder.setValue(((Number) value).doubleValue()); } }); - return expr.evaluate(functionValuesArray); + try { + return expr.evaluate(functionValuesArray); + } catch (IOException e) { + 
throw new UncheckedIOException(e); + } } }; }; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 3efcfde684ebc..a3c0c60d75436 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -138,7 +138,7 @@ protected void assertEqualInstances(SearchTemplateResponse expectedInstance, Sea SearchResponse expectedResponse = expectedInstance.getResponse(); SearchResponse newResponse = newInstance.getResponse(); - assertEquals(expectedResponse.getHits().getTotalHits().value, newResponse.getHits().getTotalHits().value); + assertEquals(expectedResponse.getHits().getTotalHits().value(), newResponse.getHits().getTotalHits().value()); assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index fed598e46fbd9..cbb0e19d64a6e 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -74,11 +74,6 @@ public void testMapBasic() throws IOException { Map state = new HashMap<>(); Scorable scorer = new Scorable() { - @Override - public int docID() { - return 0; - } - @Override public float score() { return 0.5f; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 
01a9e995450aa..7edd6d5303252 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -85,7 +85,7 @@ public void testBasics() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } @@ -134,7 +134,7 @@ public void testWeightScript() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java index 19173c650c24a..1c6ffe75e3fd2 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java @@ -43,7 +43,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(20f)); } @@ -52,7 +52,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, 
HIGHER_RANKED_FEATURE).boost(100f)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(2000f)); } @@ -67,7 +67,7 @@ public void testRankFeaturesTermQuery() throws IOException { .minimumShouldMatch(1) ), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); for (SearchHit hit : searchResponse.getHits().getHits()) { if (hit.getId().equals("all")) { assertThat(hit.getScore(), equalTo(50f)); @@ -83,7 +83,7 @@ public void testRankFeaturesTermQuery() throws IOException { ); assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index 4fc4fc69e0ee8..97c97a643e9c8 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -203,7 +203,7 @@ private SearchRequestBuilder prepareTokenCountFieldMapperSearch() { } private void assertSearchReturns(SearchResponse result, String... 
ids) { - assertThat(result.getHits().getTotalHits().value, equalTo((long) ids.length)); + assertThat(result.getHits().getTotalHits().value(), equalTo((long) ids.length)); assertThat(result.getHits().getHits().length, equalTo(ids.length)); List foundIds = new ArrayList<>(); for (SearchHit hit : result.getHits()) { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java index bce6ffb5e0ea3..f277d28eed922 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java @@ -468,8 +468,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? 
new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index d16034c5de2fd..a992f68d93d9e 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TwoPhaseIterator; @@ -266,7 +267,7 @@ public boolean isCacheable(LeafReaderContext ctx) { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0); if (scorer == null) { return Explanation.noMatch("No matching phrase"); } @@ -286,15 +287,26 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public RuntimePhraseScorer scorer(LeafReaderContext context) throws IOException { - final Scorer approximationScorer = approximationWeight != null ? 
approximationWeight.scorer(context) : null; - if (approximationScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier approximationSupplier = approximationWeight != null ? approximationWeight.scorerSupplier(context) : null; + if (approximationSupplier == null) { return null; } - final DocIdSetIterator approximation = approximationScorer.iterator(); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); - final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); - return new RuntimePhraseScorer(this, approximation, leafSimScorer, valueFetcher, field, in); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximationScorer = approximationSupplier.get(leadCost); + final DocIdSetIterator approximation = approximationScorer.iterator(); + final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); + final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); + return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in); + } + + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -310,7 +322,7 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { Weight innerWeight = in.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1); return innerWeight.matches(context, doc); } - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0L); if (scorer == null) { return null; } @@ -336,14 +348,12 @@ private class RuntimePhraseScorer extends Scorer { private float freq; private RuntimePhraseScorer( - Weight weight, DocIdSetIterator approximation, LeafSimScorer scorer, CheckedIntFunction, IOException> 
valueFetcher, String field, Query query ) { - super(weight); this.scorer = scorer; this.valueFetcher = valueFetcher; this.field = field; diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index 922b92263d712..1eb6083cfe453 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -89,8 +89,8 @@ private void assertPhraseQuery(MapperService mapperService) throws IOException { SearchExecutionContext context = createSearchExecutionContext(mapperService, newSearcher(reader)); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "brown fox"); TopDocs docs = context.searcher().search(queryBuilder.toQuery(context), 1); - assertThat(docs.totalHits.value, equalTo(1L)); - assertThat(docs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(docs.totalHits.value(), equalTo(1L)); + assertThat(docs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(docs.scoreDocs[0].doc, equalTo(0)); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java index 84139409e8bc6..a49e0c2a3e38d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java @@ -61,7 +61,7 @@ public class SourceConfirmedTextQueryTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = context -> docID -> { 
sourceFetchCount.incrementAndGet(); - return Collections.singletonList(context.reader().document(docID).get("body")); + return Collections.singletonList(context.reader().storedFields().document(docID).get("body")); }; public void testTerm() throws Exception { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java index 0fef801b22009..2befcfb576017 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java @@ -41,7 +41,7 @@ public class SourceIntervalsSourceTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = - context -> docID -> Collections.singletonList(context.reader().document(docID).get("body")); + context -> docID -> Collections.singletonList(context.reader().storedFields().document(docID).get("body")); public void testIntervals() throws IOException { final FieldType ft = new FieldType(TextField.TYPE_STORED); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java index ad8e252e3fd63..9c0e5ce071dc6 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -115,7 +115,7 @@ public void testParentWithMultipleBuckets() { logger.info("bucket={}", bucket.getKey()); Children childrenBucket = bucket.getAggregations().get("to_comment"); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - logger.info("total_hits={}", 
topHits.getHits().getTotalHits().value); + logger.info("total_hits={}", topHits.getHits().getTotalHits().value()); for (SearchHit searchHit : topHits.getHits()) { logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); } @@ -129,7 +129,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(2L)); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(2L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(2L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("e")); assertThat(topHits.getHits().getAt(1).getId(), equalTo("f")); @@ -141,7 +141,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); categoryBucket = categoryTerms.getBucketByKey("c"); @@ -152,7 +152,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); } ); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index 872165014f5a4..cce0ef06cbf62 100644 --- 
a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -107,7 +107,7 @@ public void testMultiLevelChild() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -117,7 +117,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -127,7 +127,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -135,7 +135,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -143,7 +143,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -161,7 +161,7 @@ public void test2744() throws IOException { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } ); @@ -182,7 +182,7 @@ public void testSimpleChildQuery() throws Exception { // TEST FETCHING _parent from child assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -195,7 +195,7 @@ public void testSimpleChildQuery() throws Exception { boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -208,7 +208,7 @@ public void testSimpleChildQuery() throws Exception { // HAS CHILD 
assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")), response -> { assertHitCount(response, 1L); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); }); @@ -307,8 +307,8 @@ public void testHasParentFilter() throws Exception { ).setSize(numChildDocsPerParent), response -> { Set childIds = parentToChildrenEntry.getValue(); - assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); - for (int i = 0; i < response.getHits().getTotalHits().value; i++) { + assertThat(response.getHits().getTotalHits().value(), equalTo((long) childIds.size())); + for (int i = 0; i < response.getHits().getTotalHits().value(); i++) { assertThat(childIds.remove(response.getHits().getAt(i).getId()), is(true)); assertThat(response.getHits().getAt(i).getScore(), is(1.0f)); } @@ -341,7 +341,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -349,7 +349,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -357,7 +357,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( 
prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -367,7 +367,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -375,7 +375,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -383,7 +383,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -426,7 +426,7 @@ public void testScopedFacet() throws Exception { ) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -458,7 +458,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -472,7 +472,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\"")); } @@ -647,7 +647,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -667,7 +667,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -687,7 +687,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -707,7 +707,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("17")); @@ -768,7 +768,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); } ); @@ -778,7 +778,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false)) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); } ); @@ -801,7 +801,7 @@ public void testHasChildInnerHitsHighlighting() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits(); assertThat(searchHits.length, equalTo(1)); @@ -888,7 +888,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("p_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("p000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("p001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("p002")); @@ -903,7 +903,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("c_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(500L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(500L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("c000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("c001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("c002")); @@ -932,7 +932,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), 
equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -943,7 +943,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c3")); assertThat(response.getHits().getAt(1).getId(), equalTo("c4")); } @@ -961,7 +961,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -972,7 +972,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); assertThat(response.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); } @@ -996,7 +996,7 @@ public void testHasChildQueryWithMinimumScore() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)).setMinScore(3), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); assertThat(response.getHits().getAt(0).getScore(), equalTo(3.0f)); } @@ -1411,7 +1411,7 @@ public void testParentChildQueriesViaScrollApi() throws Exception { 10, (respNum, response) -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); } ); } @@ -1469,7 +1469,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = NONE assertResponse(minMaxQuery(ScoreMode.None, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1479,7 +1479,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("4")); @@ -1487,7 +1487,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1495,7 +1495,7 @@ 
public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.None, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1505,7 +1505,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1515,7 +1515,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1523,7 +1523,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1533,7 +1533,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = SUM 
assertResponse(minMaxQuery(ScoreMode.Total, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1543,7 +1543,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1551,7 +1551,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); }); @@ -1559,7 +1559,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Total, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Total, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1569,7 +1569,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 3), response -> 
{ - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1579,7 +1579,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1587,7 +1587,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1597,7 +1597,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = MAX assertResponse(minMaxQuery(ScoreMode.Max, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1607,7 +1607,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1615,7 +1615,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1623,7 +1623,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Max, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Max, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1633,7 +1633,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1643,7 +1643,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); 
assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1651,7 +1651,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1661,7 +1661,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = AVG assertResponse(minMaxQuery(ScoreMode.Avg, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1671,7 +1671,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1679,7 +1679,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); 
assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1687,7 +1687,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Avg, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1697,7 +1697,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1707,7 +1707,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1715,7 +1715,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); }); diff 
--git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java index 0ae10b297f709..6d6072b2992ca 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java @@ -128,7 +128,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getAt(0).getId(), equalTo("c1")); assertThat(innerHits.getAt(1).getId(), equalTo("c2")); @@ -148,7 +148,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getAt(0).getId(), equalTo("c4")); assertThat(innerHits.getAt(1).getId(), equalTo("c5")); @@ -280,7 +280,7 @@ public void testRandomParentChild() throws Exception { assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); @@ -290,7 +290,7 @@ public void testRandomParentChild() throws Exception { 
offset1 += child1InnerObjects[parent]; inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child2InnerObjects[parent])); for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); @@ -347,12 +347,12 @@ public void testInnerHitsOnHasParent() throws Exception { SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); searchHit = response.getHits().getAt(1); assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); } ); @@ -394,11 +394,11 @@ public void testParentChildMultipleLayers() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("3")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("5")); } ); @@ -417,11 +417,11 @@ public void testParentChildMultipleLayers() throws Exception { 
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("4")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("6")); } ); @@ -482,34 +482,34 @@ public void testRoyals() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls"); - assertThat(innerHits.getTotalHits().value, equalTo(4L)); + assertThat(innerHits.getTotalHits().value(), equalTo(4L)); assertThat(innerHits.getAt(0).getId(), equalTo("earl1")); assertThat(innerHits.getAt(1).getId(), equalTo("earl2")); assertThat(innerHits.getAt(2).getId(), equalTo("earl3")); assertThat(innerHits.getAt(3).getId(), equalTo("earl4")); SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1")); innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2")); innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3")); innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons"); - 
assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4")); innerHits = response.getHits().getAt(0).getInnerHits().get("princes"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("prince")); innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("king")); } ); @@ -532,12 +532,12 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { response -> { assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); } @@ -549,7 +549,7 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { 
assertResponse(prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC), response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2")); }); diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index 258cbe743d7d3..60412179807a5 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -102,7 +102,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag public void collect(int docId, long owningBucketOrd) throws IOException { if (parentDocs.get(docId) && globalOrdinals.advanceExact(docId)) { int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; collectionStrategy.add(owningBucketOrd, globalOrdinal); } } @@ -134,11 +134,6 @@ protected void prepareSubAggs(long[] ordsToCollect) throws IOException { public float score() { return 1f; } - - @Override - public int docID() { - return childDocsIter.docID(); - } }); final Bits liveDocs = ctx.reader().getLiveDocs(); @@ -150,7 +145,7 @@ public int docID() { continue; } int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != 
-1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; /* * Check if we contain every ordinal. It's almost certainly be * faster to replay all the matching ordinals and filter them down diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 9ecf4ed821e2a..6b00e94431bef 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -20,8 +20,8 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -137,12 +137,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } for (LeafReaderContext ctx : 
this.context.searcher().getIndexReader().leaves()) { diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 03a1677e60f47..707fcc822665f 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -107,7 +107,7 @@ public void testParentChild() throws IOException { // verify for each children for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexReader, aggregation -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId("child0_" + parent))), indexReader, aggregation -> { assertEquals( "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation, 1, diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index 91ec0e3c67691..ca90b0e588b18 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -104,7 +104,7 @@ public void testParentChild() throws IOException { }); for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexReader, child -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(parent))), indexReader, child -> { assertEquals((long) expectedParentChildRelations.get(parent).v1(), 
child.getDocCount()); assertEquals( expectedParentChildRelations.get(parent).v2(), diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index d4fe49ec8c773..9244f815cd957 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -54,6 +54,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; @@ -341,13 +342,13 @@ static void assertLateParsingQuery(Query query, String type, String id) throws I BooleanQuery booleanQuery = (BooleanQuery) lateParsingQuery.getInnerQuery(); assertThat(booleanQuery.clauses().size(), equalTo(2)); // check the inner ids query, we have to call rewrite to get to check the type it's executed against - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); - TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).getQuery(); - assertEquals(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(id)), termsQuery); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(booleanQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); + TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).query(); + assertEquals(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(id))), termsQuery); // check the type filter - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.FILTER)); - assertEquals(new TermQuery(new Term("join_field", type)), 
booleanQuery.clauses().get(1).getQuery()); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.FILTER)); + assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).query()); } @Override diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 255131b51a57a..393c7b6157077 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -110,74 +110,93 @@ public Explanation explain(LeafReaderContext leafReaderContext, int docId) throw } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext); - if (approximation == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext leafReaderContext) throws IOException { + final ScorerSupplier approximationSupplier = candidateMatchesWeight.scorerSupplier(leafReaderContext); + if (approximationSupplier == null) { return null; } - final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + ScorerSupplier verifiedDocsScorer; if (scoreMode.needsScores()) { - return new BaseScorer(this, approximation) { - - float score; - - @Override - boolean matchDocId(int docId) throws IOException { - Query query = percolatorQueries.apply(docId); - if (query != null) { - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST) - .add(nonNestedDocsFilter, Occur.FILTER) - .build(); - } - TopDocs topDocs = percolatorIndexSearcher.search(query, 1); - if (topDocs.scoreDocs.length > 0) { - score = topDocs.scoreDocs[0].score; - return true; - } else { - return false; + verifiedDocsScorer = null; + } else { + verifiedDocsScorer = 
verifiedMatchesWeight.scorerSupplier(leafReaderContext); + } + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximation = approximationSupplier.get(leadCost); + final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + if (scoreMode.needsScores()) { + return new BaseScorer(approximation) { + + float score; + + @Override + boolean matchDocId(int docId) throws IOException { + Query query = percolatorQueries.apply(docId); + if (query != null) { + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + TopDocs topDocs = percolatorIndexSearcher.search(query, 1); + if (topDocs.scoreDocs.length > 0) { + score = topDocs.scoreDocs[0].score; + return true; + } else { + return false; + } + } else { + return false; + } } - } else { - return false; - } - } - @Override - public float score() { - return score; - } - }; - } else { - ScorerSupplier verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext); - Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); - return new BaseScorer(this, approximation) { + @Override + public float score() { + return score; + } + }; + } else { + Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); + return new BaseScorer(approximation) { + + @Override + public float score() throws IOException { + return 0f; + } - @Override - public float score() throws IOException { - return 0f; + boolean matchDocId(int docId) throws IOException { + // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. 
+ // If docId also appears in the verifiedDocsBits then that means during indexing + // we were able to extract all query terms and for this candidate match + // and we determined based on the nature of the query that it is safe to skip + // the MemoryIndex verification. + if (verifiedDocsBits.get(docId)) { + return true; + } + Query query = percolatorQueries.apply(docId); + if (query == null) { + return false; + } + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + return Lucene.exists(percolatorIndexSearcher, query); + } + }; } + } - boolean matchDocId(int docId) throws IOException { - // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. - // If docId also appears in the verifiedDocsBits then that means during indexing - // we were able to extract all query terms and for this candidate match - // and we determined based on the nature of the query that it is safe to skip - // the MemoryIndex verification. 
- if (verifiedDocsBits.get(docId)) { - return true; - } - Query query = percolatorQueries.apply(docId); - if (query == null) { - return false; - } - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST).add(nonNestedDocsFilter, Occur.FILTER).build(); - } - return Lucene.exists(percolatorIndexSearcher, query); - } - }; - } + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -265,8 +284,7 @@ abstract static class BaseScorer extends Scorer { final Scorer approximation; - BaseScorer(Weight weight, Scorer approximation) { - super(weight); + BaseScorer(Scorer approximation) { this.approximation = approximation; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 409b6fd70c3c7..d6422efdfed26 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -294,7 +294,7 @@ Tuple createCandidateQuery(IndexReader indexReader) throw List extractedTerms = t.v1(); Map> encodedPointValuesByField = t.v2(); // `1 + ` is needed to take into account the EXTRACTION_FAILED should clause - boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery + boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= IndexSearcher .getMaxClauseCount(); List subQueries = new ArrayList<>(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index c363746856681..8413b564c2041 100644 --- 
a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -91,7 +91,7 @@ public void process(HitContext hitContext) throws IOException { query = percolatorIndexSearcher.rewrite(query); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC)); - if (topDocs.totalHits.value == 0) { + if (topDocs.totalHits.value() == 0) { // This hit didn't match with a percolate query, // likely to happen when percolating multiple documents continue; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index da4b10956dcf8..0e9aa6de3a0c0 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.percolator; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanTermQuery; @@ -26,12 +25,15 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.query.DateRangeIncludingNowQuery; import org.elasticsearch.lucene.queries.BlendedTermQuery; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collections; @@ -162,7 +164,7 @@ public QueryVisitor getSubVisitor(Occur occur, Query parent) { int minimumShouldMatchValue = 0; if (parent instanceof BooleanQuery bq) { if (bq.getMinimumNumberShouldMatch() == 0 - && bq.clauses().stream().anyMatch(c -> c.getOccur() == Occur.MUST || c.getOccur() == Occur.FILTER)) { + && bq.clauses().stream().anyMatch(c -> c.occur() == Occur.MUST || c.occur() == Occur.FILTER)) { return QueryVisitor.EMPTY_VISITOR; } minimumShouldMatchValue = bq.getMinimumNumberShouldMatch(); @@ -198,11 +200,15 @@ public void consumeTerms(Query query, Term... termsToConsume) { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - PrefixCodedTerms.TermIterator ti = q.getTermData().iterator(); + BytesRefIterator bytesRefIterator = q.getBytesRefIterator(); BytesRef term; Set qe = new HashSet<>(); - while ((term = ti.next()) != null) { - qe.add(new QueryExtraction(new Term(field, term))); + try { + while ((term = bytesRefIterator.next()) != null) { + qe.add(new QueryExtraction(new Term(field, term))); + } + } catch (IOException e) { + throw new UncheckedIOException(e); } this.terms.add(new Result(true, qe, 1)); } else { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 31e893ace72fd..ff321303b56c0 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.memory.MemoryIndex; @@ -56,6 
+57,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermInSetQuery; @@ -246,15 +248,13 @@ public void testDuel() throws Exception { queryFunctions.add( () -> new TermInSetQuery( field1, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); queryFunctions.add( () -> new TermInSetQuery( field2, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); // many iterations with boolean queries, which are the most complex queries to deal with when nested @@ -647,7 +647,7 @@ public void testRangeQueries() throws Exception { v ); TopDocs topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); @@ -655,7 +655,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); @@ -663,7 +663,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, 
Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(2, topDocs.scoreDocs[0].doc); @@ -671,7 +671,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3, topDocs.scoreDocs[0].doc); @@ -679,7 +679,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(4, topDocs.scoreDocs[0].doc); @@ -690,7 +690,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(5, topDocs.scoreDocs[0].doc); } @@ -836,14 +836,14 @@ public void testPercolateMatchAll() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); 
assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); assertEquals(4, topDocs.scoreDocs[2].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); @@ -875,7 +875,7 @@ public void testFunctionScoreQuery() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -931,15 +931,15 @@ public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -947,10 +947,10 @@ public void testPercolateSmallAndLargeDocument() throws Exception { } // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery: - int origMaxClauseCount 
= BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try (Directory directory = new ByteBuffersDirectory()) { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { Document document = new Document(); for (int i = 0; i < maxClauseCount; i++) { @@ -970,22 +970,22 @@ public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); } } finally { - BooleanQuery.setMaxClauseCount(origMaxClauseCount); + IndexSearcher.setMaxClauseCount(origMaxClauseCount); } } @@ -1032,7 +1032,7 @@ public void testDuplicatedClauses() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, 
topDocs.scoreDocs[1].doc); } @@ -1066,7 +1066,7 @@ public void testDuplicatedClauses2() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1074,7 +1074,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1082,7 +1082,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1117,7 +1117,7 @@ public void testMsmAndRanges_disjunction() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1141,7 +1141,7 @@ private 
void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100); try { - assertThat(topDocs.totalHits.value, equalTo(controlTopDocs.totalHits.value)); + assertThat(topDocs.totalHits.value(), equalTo(controlTopDocs.totalHits.value())); assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); for (int j = 0; j < topDocs.scoreDocs.length; j++) { assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); @@ -1164,12 +1164,13 @@ private void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex logger.error("topDocs.scoreDocs[{}].doc={}", i, topDocs.scoreDocs[i].doc); logger.error("topDocs.scoreDocs[{}].score={}", i, topDocs.scoreDocs[i].score); } + StoredFields storedFields = shardSearcher.storedFields(); for (int i = 0; i < controlTopDocs.scoreDocs.length; i++) { logger.error("controlTopDocs.scoreDocs[{}].doc={}", i, controlTopDocs.scoreDocs[i].doc); logger.error("controlTopDocs.scoreDocs[{}].score={}", i, controlTopDocs.scoreDocs[i].score); // Additional stored information that is useful when debugging: - String queryToString = shardSearcher.doc(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); + String queryToString = storedFields.document(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); logger.error("controlTopDocs.scoreDocs[{}].query_to_string={}", i, queryToString); TermsEnum tenum = MultiTerms.getTerms(shardSearcher.getIndexReader(), fieldType.queryTermsField.name()).iterator(); @@ -1289,7 +1290,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { float _score[] = new float[] { boost }; DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); CheckedFunction leaf = queryStore.getQueries(context); @@ -1313,7 +1314,7 @@ protected boolean match(int doc) { } 
} }; - return new Scorer(this) { + Scorer scorer = new Scorer() { @Override public int docID() { @@ -1335,6 +1336,7 @@ public float getMaxScore(int upTo) throws IOException { return _score[0]; } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index 075d4d429fb39..04a8105b5fb82 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -118,7 +118,7 @@ public void testPercolateQuery() throws Exception { ) ); TopDocs topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); assertThat(topDocs.scoreDocs.length, equalTo(1)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); Explanation explanation = shardSearcher.explain(query, 0); @@ -137,7 +137,7 @@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); explanation = shardSearcher.explain(query, 1); @@ -166,7 +166,7 @@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(4L)); + assertThat(topDocs.totalHits.value(), equalTo(4L)); query = new PercolateQuery( "_name", @@ -178,7 +178,7 @@ public void testPercolateQuery() throws Exception { new MatchNoDocsQuery("") ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); 
assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); explanation = shardSearcher.explain(query, 3); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 100cda66acdcc..f72c68c6fd2e3 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -417,10 +418,10 @@ public void testExtractTermsAndRanges() throws Exception { } public void testCreateCandidateQuery() throws Exception { - int origMaxClauseCount = BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); addQueryFieldMappings(); MemoryIndex memoryIndex = new MemoryIndex(false); @@ -435,8 +436,8 @@ public void testCreateCandidateQuery() throws Exception { Tuple t = fieldType.createCandidateQuery(indexReader); assertTrue(t.v2()); assertEquals(2, t.v1().clauses().size()); - assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); - assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class)); + assertThat(t.v1().clauses().get(0).query(), instanceOf(CoveringQuery.class)); + assertThat(t.v1().clauses().get(1).query(), instanceOf(TermQuery.class)); // Now push it over the edge, so that it falls back using TermInSetQuery memoryIndex.addField("field2", "value", 
new WhitespaceAnalyzer()); @@ -444,12 +445,12 @@ public void testCreateCandidateQuery() throws Exception { t = fieldType.createCandidateQuery(indexReader); assertFalse(t.v2()); assertEquals(3, t.v1().clauses().size()); - TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).getQuery(); - assertEquals(maxClauseCount - 1, terms.getTermData().size()); - assertThat(t.v1().clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field: { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertThat(hits[0].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index a9c3e09e7f4ed..81427060615ea 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -82,7 +82,7 @@ public void testExtractQueryMetadata_termQuery() { } public void testExtractQueryMetadata_termsQuery() { - TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); + TermInSetQuery termsQuery = new TermInSetQuery("_field", List.of(new BytesRef("_term1"), new BytesRef("_term2"))); Result result = analyze(termsQuery); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java index a76ddf13e4595..8b94337141243 100644 --- 
a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java @@ -70,7 +70,7 @@ public void testReindexFromRemoteGivenIndexExists() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -85,7 +85,7 @@ public void testReindexFromRemoteGivenSameIndexNames() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -114,7 +114,7 @@ public void testReindexManyTimesFromRemoteGivenSameIndexNames() throws Exception final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } } @@ -146,7 +146,7 @@ public void testReindexFromRemoteGivenSimpleDateMathIndexName() throws Interrupt final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == 
TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -162,7 +162,7 @@ public void testReindexFromRemoteGivenComplexDateMathIndexName() throws Interrup final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java index 4b960e97ce0e0..d046ba881b5d4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; @@ -96,7 +95,7 @@ static CharacterRunAutomaton buildRemoteWhitelist(List whitelist) { return new CharacterRunAutomaton(Automata.makeEmpty()); } Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); - automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (Operations.isTotal(automaton)) { throw new IllegalArgumentException( "Refusing to start because whitelist " diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java 
b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java index b924f8c311115..01459e2ff61bb 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java @@ -97,8 +97,8 @@ class Fields { HITS_PARSER.declareField(constructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.START_OBJECT) { final TotalHits totalHits = SearchHits.parseTotalHitsFragment(p); - assert totalHits.relation == TotalHits.Relation.EQUAL_TO; - return totalHits.value; + assert totalHits.relation() == TotalHits.Relation.EQUAL_TO; + return totalHits.value(); } else { // For BWC with nodes pre 7.0 return p.longValue(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index 593d4b41df712..6c77186089644 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -242,7 +242,7 @@ public void testIndexedTermVectors() throws IOException { withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), reader -> { LeafReader leaf = reader.leaves().get(0).reader(); - Terms terms = leaf.getTermVector(0, "field"); + Terms terms = leaf.termVectors().get(0, "field"); TermsEnum iterator = terms.iterator(); BytesRef term; Set foundTerms = new HashSet<>(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java 
b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java index 61abd64e98a96..d4c4ccfaa442d 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java @@ -130,7 +130,7 @@ private void assertHighlightOneDoc( } TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.collectionToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, hiliteAnalyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java index 4594e8d71c6fb..b9f4943b1dab6 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java @@ -27,7 +27,6 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index return new SmbDirectoryWrapper( setPreload( new MMapDirectory(location, lockFactory), - lockFactory, new HashSet<>(indexSettings.getValue(IndexModule.INDEX_STORE_PRE_LOAD_SETTING)) ) ); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 8570662f7b523..73f291da15ead 100644 --- 
a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; +import org.elasticsearch.search.SearchFeatures; import org.elasticsearch.test.NotEqualMessageBuilder; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -1694,6 +1695,211 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception { } } + /** + * This test ensures that search results on old indices using "persian" analyzer don't change + * after we introduce Lucene 10 + */ + public void testPersianAnalyzerBWC() throws Exception { + var originalClusterLegacyPersianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyPersianAnalyzer); + final String indexName = "test_persian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String mapping = """ + { + "properties": { + "textfield" : { + "type": "text", + "analyzer": "persian" + } + } + } + """; + + String query = """ + { + "query": { + "match": { + "textfield": "كتابها" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + ) + ); + refresh(indexName); + + 
assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + ) + ); + refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); + + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + + /** + * This test ensures that search results on old indices using "romanain" analyzer don't change + * after we introduce Lucene 10 + */ + public void testRomanianAnalyzerBWC() throws Exception { + var originalClusterLegacyRomanianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyRomanianAnalyzer); + final String indexName = 
"test_romanian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String cedillaForm = "absenţa"; + String commaForm = "absența"; + + String mapping = """ + { + "properties": { + "textfield" : { + "type": "text", + "analyzer": "romanian" + } + } + } + """; + + // query that uses the cedilla form of "t" + String query = """ + { + "query": { + "match": { + "textfield": "absenţa" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + // this doc uses the comma form + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + refresh(indexName); + + assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + 
refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); + + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + public void testForbidDisableSoftDeletesOnRestore() throws Exception { final String snapshot = "snapshot-" + index; if (isRunningAgainstOldCluster()) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index a742e83255bbb..7525ff2dc12d2 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -54,7 +54,9 @@ tasks.named("precommit").configure { dependsOn 'enforceYamlTestConvention' } -tasks.named("yamlRestCompatTestTransform").configure({task -> - task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") - task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") +tasks.named("yamlRestCompatTestTransform").configure ({ task -> + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") + task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") + task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index dc79961ae78cd..81ca84a06f815 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -212,7 +212,6 @@ dfs knn vector profiling: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } @@ -235,6 +234,47 @@ dfs knn vector profiling: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } +--- +dfs knn vector profiling description: + - requires: + cluster_features: ["lucene_10_upgrade"] + reason: "the profile description changed with Lucene 10" + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, -20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { hits.total.value: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } + --- dfs knn vector profiling with vector_operations_count: - requires: @@ -276,7 +316,6 @@ dfs knn vector profiling with vector_operations_count: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { 
profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } @@ -300,7 +339,6 @@ dfs knn vector profiling with vector_operations_count: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } - --- dfs profile for search with dfs_query_then_fetch: - requires: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index c56bc201e7f86..8bedf436e3698 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -571,7 +571,7 @@ public void testSearchQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); @@ -601,7 +601,7 @@ public void testSearchDfsQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); 
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index cc6329a973b37..e8160a311bedb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -306,8 +306,8 @@ public void onFailure(Exception e) { prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)), expected -> assertNoFailuresAndResponse(prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()), all -> { - assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); - logger.info("total: {}", expected.getHits().getTotalHits().value); + assertEquals(expected + " vs. 
" + all, expected.getHits().getTotalHits().value(), all.getHits().getTotalHits().value()); + logger.info("total: {}", expected.getHits().getTotalHits().value()); }) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index e1bf5bce6f3ae..8391ab270b1d1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -253,7 +253,7 @@ public void assertNested(String index, int numDocs) { // now, do a nested query assertNoFailuresAndResponse( prepareSearch(index).setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocs)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java index 8b8b62da98f97..2fd6ee9a16808 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java @@ -141,11 +141,11 @@ public void afterBulk(long executionId, BulkRequest request, Exception failure) assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L)); if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), 
lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 37904e9f639ac..4ed19065f32f2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -136,11 +136,11 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) final boolean finalRejectedAfterAllRetries = rejectedAfterAllRetries; assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index cde8d41b292b7..4977d87d5a348 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -90,7 +90,7 @@ public void testSingleBulkRequest() { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) 1)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) 1)); }); assertFalse(refCounted.hasReferences()); @@ -268,7 +268,7 @@ public void testMultipleBulkPartsWithBackoff() { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docs)); }); } } @@ -358,7 +358,7 @@ public void testBulkLevelBulkFailureAfterFirstIncrementalRequest() throws Except assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(hits.get())); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(hits.get())); }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index af99a0344e030..cd17c5b345c59 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -81,7 +81,7 @@ public void 
testMappingValidationIndexExists() { ); indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); @@ -138,7 +138,7 @@ public void testMappingValidationIndexExistsTemplateSubstitutions() throws IOExc // Now make sure nothing was actually changed: indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java index 274cf90ec9529..f17196c3d97f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java @@ -45,9 +45,9 @@ public void testIndexWithWriteDelayEnabled() throws Exception { try { logger.debug("running search"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if 
(response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index 66323e687eefb..e47925cef913b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -612,7 +612,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards - shardsRemoved)); assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); }); // create a PIT when some shards are missing @@ -637,7 +637,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertThat(resp.pointInTimeId(), equalTo(pointInTimeResponseOneNodeDown.getPointInTimeId())); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); @@ -661,7 +661,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards)); assertThat(resp.getFailedShards(), equalTo(0)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, greaterThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), greaterThan((long) numDocs)); }); // ensure that when using the 
previously created PIT, we'd see the same number of documents as before regardless of the @@ -681,7 +681,7 @@ public void testMissingShardsWithPointInTime() throws Exception { } assertNotNull(resp.getHits().getTotalHits()); // we expect less documents as the newly indexed ones should not be part of the PIT - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index d1a68c68e7de5..a1395f81eb091 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -143,7 +143,7 @@ public void testLocalClusterAlias() throws ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -162,7 +162,7 @@ public void testLocalClusterAlias() throws ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -221,7 +221,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce ); searchRequest.indices(""); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, 
searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -241,7 +241,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -280,7 +280,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { ? originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(1, longTerms.getBuckets().size()); @@ -296,7 +296,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { false ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(2, longTerms.getBuckets().size()); @@ -432,7 +432,7 @@ public void testSearchIdle() throws Exception { () -> assertResponse( prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setPreFilterShardSize(randomIntBetween(1, 3)), - resp -> 
assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + resp -> assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)) ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index 848c5cacda1b9..b70da34c8fe3f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -396,7 +396,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); logger.info("--> checking filtering alias for one index"); @@ -406,7 +406,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); logger.info("--> checking filtering alias for two indices and one complete index"); @@ -416,7 +416,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for one index"); @@ -426,17 +426,17 @@ public void testSearchingFilteringAliasesTwoIndices() throws 
Exception { ); assertResponse( prepareSearch("foos", "aliasToTest1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); assertResponse( prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); @@ -446,7 +446,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.termQuery("name", "something")), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); } @@ -508,7 +508,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -517,7 +517,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception 
{ ); assertResponse( prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); assertResponse( @@ -526,7 +526,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -535,7 +535,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1", "filter23").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -544,7 +544,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -553,7 +553,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test1", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); } @@ -608,7 +608,7 @@ 
public void testDeletingByQueryFilteringAliases() throws Exception { logger.info("--> checking counts before delete"); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); } @@ -1399,7 +1399,7 @@ private void checkAliases() { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java index 4e7c22f0d8847..f7dae8a92c2d6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java @@ -44,7 +44,7 @@ public void testBroadcastOperations() throws IOException { for (int i = 0; i < 5; i++) { // test successful assertResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index eb10877f5892d..97994a38c277c 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -152,7 +152,7 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { // test successful assertNoFailuresAndResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); @@ -164,7 +164,7 @@ public void testIndexActions() throws Exception { countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length, equalTo(0) ); - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 5da9788e3079f..4d1ed9bce6440 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -115,7 +115,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -139,7 +139,7 @@ public void 
testFinalPipelineOfNewDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals(true, response.getHits().getAt(0).getSourceAsMap().get("final")); }); } @@ -163,7 +163,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -187,7 +187,7 @@ public void testDefaultPipelineOfRerouteDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertTrue(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index 7b7433e3aa4c3..cb280d5577fae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -107,7 +107,7 @@ public void testMaxDocsLimit() throws Exception { indicesAdmin().prepareRefresh("test").get(); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); if (randomBoolean()) { indicesAdmin().prepareFlush("test").get(); @@ -117,7 +117,7 @@ public void testMaxDocsLimit() throws Exception { ensureGreen("test"); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index 81a0e0ede7cd3..1194218c68ff1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -46,7 +46,7 @@ public void testDynamicTemplateCopyTo() throws Exception { AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) recordCount)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) recordCount)); assertThat(((Terms) response.getAggregations().get("test")).getBuckets().size(), equalTo(recordCount + 1)); assertThat(((Terms) response.getAggregations().get("test_raw")).getBuckets().size(), equalTo(recordCount)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 03afabaae1d0d..902dd911ddcd3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -115,7 +115,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I assertResponse( prepareSearch("index").setQuery(termQuery("_id", response.getHits().getHits()[i].getId())).setExplain(true), dupIdResponse -> { - assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L)); + assertThat(dupIdResponse.getHits().getTotalHits().value(), greaterThan(1L)); logger.info("found a duplicate id:"); for (SearchHit hit : dupIdResponse.getHits()) { logger.info("Doc {} was found on shard {}", hit.getId(), hit.getShard().getShardId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java index 62c5f934ec8b6..37fbc95d56506 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java @@ -57,9 +57,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with all types"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. 
" @@ -77,9 +77,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with a specific type"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 7db810fc70ac1..52492ba7ce657 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -149,7 +149,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 5); @@ -161,7 +161,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); @@ -174,7 +174,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, 
"index", 6, 9); @@ -217,7 +217,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 0, 1); @@ -229,7 +229,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 1, 1); @@ -241,7 +241,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 2, 1); @@ -286,7 +286,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 0, 1); @@ -299,7 +299,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); 
assertCacheState(client, "index", 1, 1); @@ -312,7 +312,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 2, 1); @@ -364,7 +364,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 0, 1); @@ -381,7 +381,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 1, 1); @@ -395,7 +395,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 2, 1); @@ -440,7 +440,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); 
assertCacheState(client, "index", 0, 0); @@ -453,7 +453,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -468,7 +468,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -483,7 +483,7 @@ public void testCanCache() throws Exception { .addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -497,7 +497,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 2); @@ -512,7 +512,7 @@ public void testCanCache() throws Exception { .addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); 
assertCacheState(client, "index", 0, 4); @@ -543,7 +543,7 @@ public void testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 0, 1); @@ -555,20 +555,20 @@ public void testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 1, 1); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 1, 2); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 2, 2); } @@ -591,7 +591,7 @@ public void testProfileDisableCache() throws Exception { client.prepareSearch("index").setRequestCache(true).setProfile(profile).setQuery(QueryBuilders.termQuery("k", "hello")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); if (profile == false) { diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index a6b168af5268d..cbb0a67edcb83 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -229,7 +229,7 @@ public void testCloseWhileRelocatingShards() throws Exception { for (String index : acknowledgedCloses) { assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { - long docsCount = response.getHits().getTotalHits().value; + long docsCount = response.getHits().getTotalHits().value(); assertEquals( "Expected " + docsPerIndex.get(index) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 942f86017c617..77c4f8a26f478 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -344,7 +344,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).setTrackTotalHits(true).addSort("id", SortOrder.ASC), response -> { logSearchResponse(numberOfShards, numberOfDocs, finalI, response); - iterationHitCount[finalI] = response.getHits().getTotalHits().value; + iterationHitCount[finalI] = response.getHits().getTotalHits().value(); if (iterationHitCount[finalI] != numberOfDocs) { error[0] = true; } @@ -391,7 +391,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, boolean[] errorOccurred = new boolean[1]; for (int i = 0; i < iterations; i++) { 
assertResponse(prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()), response -> { - if (response.getHits().getTotalHits().value != numberOfDocs) { + if (response.getHits().getTotalHits().value() != numberOfDocs) { errorOccurred[0] = true; } }); @@ -421,7 +421,7 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat logger.info( "iteration [{}] - returned documents: {} (expected {})", iteration, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), numberOfDocs ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index fb1fabfd198e6..2c56f75b051eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -240,7 +240,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), response -> { var hits = response.getHits(); - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + if (hits.getTotalHits().value() != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { hitIds[hit] = hit + 1; @@ -254,7 +254,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { } set.forEach(value -> logger.error("Missing id [{}]", value)); } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + assertThat(hits.getTotalHits().value(), equalTo(indexer.totalIndexedDocs())); logger.info("--> DONE search test round {}", idx + 1); } ); @@ -364,9 +364,9 @@ public void indexShardStateChanged( for (Client client : clients()) { 
assertNoFailuresAndResponse(client.prepareSearch("test").setPreference("_local").setSize(0), response -> { if (expectedCount[0] < 0) { - expectedCount[0] = response.getHits().getTotalHits().value; + expectedCount[0] = response.getHits().getTotalHits().value(); } else { - assertEquals(expectedCount[0], response.getHits().getTotalHits().value); + assertEquals(expectedCount[0], response.getHits().getTotalHits().value()); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 45dce5789b9bc..199c9a9fb4c8c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -296,7 +296,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1).setQuery(QueryBuilders.matchAllQuery()), response -> { logger.info("--> search all on index_* should find two"); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); // Let's make sure that, even though 2 docs are available, only one is returned according to the size we set in the request // Therefore the reduce phase has taken place, which proves that the QUERY_AND_FETCH search type wasn't erroneously forced. 
assertThat(response.getHits().getHits().length, equalTo(1)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java index 7bccf3db1284e..68bc6656cec7f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java @@ -160,7 +160,7 @@ private void verifyRoutedSearches(String index, Map> routing + "] shards for routing [" + routing + "] and got hits [" - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + "]" ); @@ -168,7 +168,7 @@ private void verifyRoutedSearches(String index, Map> routing response.getTotalShards() + " was not in " + expectedShards + " for " + index, expectedShards.contains(response.getTotalShards()) ); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); @@ -188,7 +188,7 @@ private void verifyBroadSearches(String index, Map> routingT prepareSearch().setQuery(QueryBuilders.termQuery("_routing", routing)).setIndices(index).setSize(100), response -> { assertEquals(expectedShards, response.getTotalShards()); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java index ee1aac60da9c1..f63f09764621b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -64,7 +64,7 @@ public void testTopHitsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); } @@ -81,7 +81,7 @@ public void testAggsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertEquals(searchResponse.getHits().getHits().length, 0); StringTerms terms = searchResponse.getAggregations().get("terms"); assertEquals(1, terms.getBuckets().size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index d023c9de87ca5..4a407ae66f7ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -115,7 +115,7 @@ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exceptio histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode)) ), response -> { - assertThat(response.getHits().getTotalHits().value, Matchers.equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), Matchers.equalTo(0L)); Histogram values = 
response.getAggregations().get("values"); assertThat(values, notNullValue()); assertThat(values.getBuckets().isEmpty(), is(true)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 5a21b600cacd4..1a6e1519d4402 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -293,7 +293,7 @@ public void testDuelTerms() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); final Terms longTerms = response.getAggregations().get("long"); final Terms doubleTerms = response.getAggregations().get("double"); @@ -413,7 +413,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index a820e6e8d1747..2bd19c9d32d44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -57,7 +57,7 @@ public void testWrapperQueryIsRewritten() throws IOException { metadata.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); builder.setMetadata(metadata); assertResponse(client().prepareSearch("test").setSize(0).addAggregation(builder), response -> { - 
assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); InternalFilters filters = response.getAggregations().get("titles"); assertEquals(1, filters.getBuckets().size()); assertEquals(2, filters.getBuckets().get(0).getDocCount()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index a8e2ca818d3f4..c4560c1b00079 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -974,7 +974,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateHistogram("date_histo").field("value").fixedInterval(DateHistogramInterval.HOUR)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); @@ -1011,7 +1011,7 @@ public void testSingleValueWithTimeZone() throws Exception { .format("yyyy-MM-dd:HH-mm-ssZZZZZ") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -1175,7 +1175,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { assertThat( "Expected 24 buckets for one day aggregation with hourly interval", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), equalTo(2L) ); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 778be4ee0705f..21b36391781b8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -78,7 +78,7 @@ public void testSingleValueWithPositiveOffset() throws Exception { dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -99,7 +99,7 @@ public void testSingleValueWithNegativeOffset() throws Exception { dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -128,7 +128,7 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception { .fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(24L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(24L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java 
index afa3ad9d7e737..9ec459ee565e5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -578,7 +578,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateRange("date_range").field("value").addRange("0-1", 0, 1)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -722,7 +722,7 @@ public void testRangeWithFormatStringValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00:16:40-00:50:00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00:50:00-01:06:40", 3000000L, 4000000L); @@ -739,7 +739,7 @@ public void testRangeWithFormatStringValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -753,7 +753,7 @@ public void testRangeWithFormatStringValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 
3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); @@ -788,7 +788,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -799,7 +799,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -810,7 +810,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -827,7 +827,7 @@ public void testRangeWithFormatNumericValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -841,7 +841,7 @@ public void testRangeWithFormatNumericValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 1b70b859426d5..96807ed119866 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -159,7 +159,7 @@ public void testEmptyAggregation() throws Exception { 
histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index b030370215cd3..439583de910c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -247,7 +247,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery()))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -455,7 +455,7 @@ public void testEmptyAggregationWithOtherBucket() throws Exception { .subAggregation(filters("filters", new KeyedFilter("foo", matchAllQuery())).otherBucket(true).otherBucketKey("bar")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 843e50a5a7e21..907f943e68422 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -413,7 +413,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 2edd567221bef..ad65e6468b812 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -915,7 +915,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(histogram("sub_histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 72f1b0cc56b25..5e7cffcc8ef0d 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -351,7 +351,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(nested("nested", "nested"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 8b63efd92a648..1cfd6e00af7ab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -866,7 +866,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index 5e2a44285e8fa..29bf8a8a0b45a 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -97,7 +97,7 @@ public void testEmptyAggregation() throws Exception { histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -130,7 +130,7 @@ public void testUnmapped() throws Exception { assertResponse( prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ExtendedStats stats = response.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 762bc5bdfaf39..ff4150556c011 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -112,7 +112,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = 
histo.getBuckets().get(1); @@ -138,7 +138,7 @@ public void testUnmapped() throws Exception { .field("value") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 12ed0a5c1a8e0..fe6dc7abf66a8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -116,7 +116,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -143,7 +143,7 @@ public void testUnmapped() throws Exception { .percentiles(0, 10, 15, 100) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 52425ae1d9f17..4c8fed2c16ddc 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -358,7 +358,7 @@ public void testMap() { prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -407,7 +407,7 @@ public void testMapWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -467,7 +467,7 @@ public void testInitMutatesParams() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -522,7 +522,7 @@ public void testMapCombineWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -586,7 +586,7 @@ public void testInitMapCombineWithParams() { .reduceScript(reduceScript) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -655,7 +655,7 @@ public void testInitMapCombineReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -714,7 +714,7 @@ public void testInitMapCombineReduceGetProperty() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Global global = response.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -773,7 +773,7 @@ public void testMapCombineReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -824,7 +824,7 @@ public void testInitMapReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -869,7 +869,7 @@ public void testMapReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -928,7 +928,7 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -964,7 +964,7 @@ public void testInitMapCombineReduceWithParamsStored() { .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -1025,7 +1025,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); assertThat(aggregation, notNullValue()); assertThat(aggregation, instanceOf(Histogram.class)); @@ -1099,7 +1099,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Bucket bucket = histo.getBuckets().get(1); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index fbe70ec2a40d6..1169f8bbdbf18 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -56,7 +56,7 @@ public void testEmptyAggregation() throws Exception { ), response -> { assertShardExecutionState(response, 0); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 2a8be6b4244dd..b3ad5c578e618 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -82,7 +82,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 2877f8882d6d6..d6cceb2013701 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -105,7 +105,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value"))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -146,7 +146,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 })).field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index bbcf7b191fe1b..b4072bcf226ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -111,7 +111,7 @@ public void testEmptyAggregation() throws Exception { 
.subAggregation(randomCompression(percentiles("percentiles").field("value")).percentiles(10, 15)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -132,7 +132,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(0, 10, 15, 100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 7ac8e3c7a35b4..80c47d6180db0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -328,7 +328,7 @@ public void testBasics() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); higestSortValue += 10; assertThat((Long) hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); @@ -348,7 +348,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) 
.addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -381,7 +381,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -413,7 +413,7 @@ public void testBreadthFirstWithScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -444,7 +444,7 @@ public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -501,7 +501,7 @@ public void testPagination() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); 
TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(controlHits.getTotalHits().value)); + assertThat(hits.getTotalHits().value(), equalTo(controlHits.getTotalHits().value())); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { logger.info( @@ -543,7 +543,7 @@ public void testSortByBucket() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); assertThat(hits.getAt(1).getSortValues()[0], equalTo(higestSortValue - 1)); @@ -578,7 +578,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("b")); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("6")); @@ -586,7 +586,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("c")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(3L)); + assertThat(hits.getTotalHits().value(), equalTo(3L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("9")); @@ -594,7 +594,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("a")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); + 
assertThat(hits.getTotalHits().value(), equalTo(2L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("2")); } @@ -630,7 +630,7 @@ public void testFetchFeatures() throws IOException { for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(1)); SearchHit hit = hits.getAt(0); @@ -682,7 +682,7 @@ public void testEmptyIndex() throws Exception { TopHits hits = response.getAggregations().get("hits"); assertThat(hits, notNullValue()); assertThat(hits.getName(), equalTo("hits")); - assertThat(hits.getHits().getTotalHits().value, equalTo(0L)); + assertThat(hits.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -744,7 +744,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); TopHits topHits = bucket.getAggregations().get("top-comments"); SearchHits searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(1)); @@ -753,7 +753,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(2L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), 
equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(2)); @@ -765,7 +765,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(4)); @@ -789,7 +789,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toComments.getDocCount(), equalTo(4L)); TopHits topComments = toComments.getAggregations().get("top-comments"); - assertThat(topComments.getHits().getTotalHits().value, equalTo(4L)); + assertThat(topComments.getHits().getTotalHits().value(), equalTo(4L)); assertThat(topComments.getHits().getHits().length, equalTo(4)); assertThat(topComments.getHits().getAt(0).getId(), equalTo("2")); @@ -816,7 +816,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toReviewers.getDocCount(), equalTo(7L)); TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers"); - assertThat(topReviewers.getHits().getTotalHits().value, equalTo(7L)); + assertThat(topReviewers.getHits().getTotalHits().value(), equalTo(7L)); assertThat(topReviewers.getHits().getHits().length, equalTo(7)); assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1")); @@ -899,7 +899,7 @@ public void testNestedFetchFeatures() { assertThat(nested.getDocCount(), equalTo(4L)); SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); SearchHit searchHit = hits.getAt(0); 
assertThat(searchHit.getId(), equalTo("1")); assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments")); @@ -960,7 +960,7 @@ public void testTopHitsInNested() throws Exception { TopHits hits = nested.getAggregations().get("comments"); SearchHits searchHits = hits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(numNestedDocs)); + assertThat(searchHits.getTotalHits().value(), equalTo(numNestedDocs)); for (int j = 0; j < 3; j++) { assertThat(searchHits.getAt(j).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(j).getNestedIdentity().getOffset(), equalTo(0)); @@ -1064,7 +1064,7 @@ public void testNoStoredFields() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); for (SearchHit hit : hits) { assertThat(hit.getSourceAsMap(), nullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 3dee7a8d6e92f..6e00c1e5a8d90 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -67,7 +67,7 @@ protected Collection> nodePlugins() { public void testUnmapped() throws Exception { assertResponse(prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(count("count").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ValueCount valueCount = response.getAggregations().get("count"); 
assertThat(valueCount, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 3263be081a6f7..2cd22c6a65222 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -72,14 +72,14 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setPreference(preference + Integer.toString(counter++)) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { refresh(); assertResponse( client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterRefresh -> { logger.info( "hits count mismatch on any shard search failed, post explicit refresh hits are {}", - searchResponseAfterRefresh.getHits().getTotalHits().value + searchResponseAfterRefresh.getHits().getTotalHits().value() ); ensureGreen(); assertResponse( @@ -88,7 +88,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterGreen -> logger.info( "hits count mismatch on any shard search failed, post explicit wait for green hits are {}", - searchResponseAfterGreen.getHits().getTotalHits().value + searchResponseAfterGreen.getHits().getTotalHits().value() ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index cab70ba7d7339..0d06856ca1088 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -77,7 +77,7 @@ public void run() { try { while (stop.get() == false) { assertResponse(prepareSearch().setSize(numDocs), response -> { - if (response.getHits().getTotalHits().value != numDocs) { + if (response.getHits().getTotalHits().value() != numDocs) { // if we did not search all shards but had no serious failures that is potentially fine // if only the hit-count is wrong. this can happen if the cluster-state is behind when the // request comes in. It's a small window but a known limitation. @@ -86,7 +86,7 @@ public void run() { .allMatch(ssf -> ssf.getCause() instanceof NoShardAvailableActionException)) { nonCriticalExceptions.add( "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numDocs + " was expected. " @@ -100,7 +100,7 @@ public void run() { final SearchHits sh = response.getHits(); assertThat( "Expected hits to be the same size the actual hits array", - sh.getTotalHits().value, + sh.getTotalHits().value(), equalTo((long) (sh.getHits().length)) ); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 1745ad82931ba..4b59d5b9a78d5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -126,7 +126,7 @@ public void testDfsQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); 
SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -169,7 +169,7 @@ public void testDfsQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -208,7 +208,7 @@ public void testQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -237,7 +237,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(60)); for (int i = 0; i < 60; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -248,7 +248,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(40)); for (int i = 0; i < 40; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -271,7 +271,7 @@ public void 
testQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -301,7 +301,7 @@ public void testSimpleFacets() throws Exception { .aggregation(AggregationBuilders.filter("test1", termQuery("name", "test1"))); assertNoFailuresAndResponse(client().search(new SearchRequest("test").source(sourceBuilder)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(100L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(100L)); Global global = response.getAggregations().get("global"); Filter all = global.getAggregations().get("all"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 223ee81e84a92..5233a0cd564ef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -685,7 +685,7 @@ public void testDateMathIndexes() throws ExecutionException, InterruptedExceptio assertNotNull(localClusterSearchInfo); Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER); assertNotNull(remoteClusterSearchInfo); - assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value, greaterThan(2L)); + assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value(), greaterThan(2L)); for (var hit : response.getHits()) { assertThat(hit.getIndex(), anyOf(equalTo("datemath-2001-01-01-14"), equalTo("remotemath-2001-01-01-14"))); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 2cb2e186b257e..91cc344614c23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -139,7 +139,7 @@ private void hitExecute(FetchContext context, HitContext hitContext) throws IOEx hitField = new DocumentField(NAME, new ArrayList<>(1)); hitContext.hit().setDocumentField(NAME, hitField); } - Terms terms = hitContext.reader().getTermVector(hitContext.docId(), field); + Terms terms = hitContext.reader().termVectors().get(hitContext.docId(), field); if (terms != null) { TermsEnum te = terms.iterator(); Map tv = new HashMap<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 66d44a818b797..e39f8df9bad36 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -151,7 +151,7 @@ public void testSimpleNested() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(2)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -171,7 +171,7 @@ public void testSimpleNested() throws Exception { assertThat(response.getHits().getAt(0).getShard(), notNullValue()); 
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getHits().length, equalTo(3)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -196,7 +196,7 @@ public void testSimpleNested() throws Exception { ), response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(1)); HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message"); assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick")); @@ -264,7 +264,7 @@ public void testRandomNested() throws Exception { SearchHit searchHit = response.getHits().getAt(i); assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field1InnerObjects[i])); for (int j = 0; j < field1InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1")); @@ -273,7 +273,7 @@ public void testRandomNested() throws Exception { } inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field2InnerObjects[i])); for (int j = 0; j < field2InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); 
assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2")); @@ -378,13 +378,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -409,13 +409,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); 
assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -436,7 +436,7 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -460,13 +460,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -538,7 +538,7 @@ public void testNestedDefinedAsObject() throws Exception { response -> { assertHitCount(response, 1); 
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1")); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), @@ -613,7 +613,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit parent = response.getHits().getAt(0); assertThat(parent.getId(), equalTo("1")); SearchHits inner = parent.getInnerHits().get("comments.messages"); - assertThat(inner.getTotalHits().value, equalTo(2L)); + assertThat(inner.getTotalHits().value(), equalTo(2L)); assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}")); assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}")); } @@ -629,7 +629,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(2L)); + assertThat(messages.getTotalHits().value(), equalTo(2L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2)); @@ -651,7 +651,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + 
assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); @@ -685,7 +685,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); @@ -786,22 +786,22 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC), response -> { assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getAt(0).getId(), equalTo("0")); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1")); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1)); 
assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2")); for (int i = 2; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(String.valueOf(i))); - assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3")); } @@ -844,7 +844,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -865,7 +865,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -891,7 +891,7 @@ public void testNestedSource() throws Exception { ), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0)); } ); @@ -901,7 +901,7 @@ public void testNestedSource() throws Exception { .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty()); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 0ce4f34463b03..0805d0f366b0f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3340,7 +3340,7 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti new 
SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1)); } ); @@ -3412,7 +3412,7 @@ public void testKeywordFieldHighlighting() throws IOException { .highlighter(new HighlightBuilder().field("*")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("keyword_field"); assertThat(highlightField.fragments()[0].string(), equalTo("some text")); } @@ -3569,7 +3569,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { .should(QueryBuilders.termQuery("field", "hello")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertHighlight(response, 0, "field", 0, 1, equalTo("hello world")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index d1eb1ab533ab7..16e5e42e00c9f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -191,26 +191,26 @@ public void testStoredFields() throws Exception { indicesAdmin().prepareRefresh().get(); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field1"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); // field2 is not stored, check that it is not extracted from source. assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); assertThat(response.getHits().getAt(0).getFields().get("field2"), nullValue()); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -218,7 +218,7 @@ public void testStoredFields() throws Exception { assertResponse( prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); 
+ assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -226,20 +226,20 @@ public void testStoredFields() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), nullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -247,7 +247,7 @@ public void testStoredFields() throws Exception { 
assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), notNullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -311,7 +311,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); @@ -342,7 +342,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", Map.of("factor", 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat(fields, equalTo(singleton("sNum1"))); @@ -429,7 +429,7 @@ public void testIdBasedScriptFields() throws Exception { .setSize(numDocs) .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); @@ -638,7 +638,7 @@ public void testStoredFieldsWithoutSource() throws Exception { .addStoredField("boolean_field") .addStoredField("binary_field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -681,7 +681,7 @@ public void testSearchFieldsMetadata() throws Exception { .get(); assertResponse(prepareSearch("my-index").addStoredField("field1").addStoredField("_routing"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field("field1"), nullValue()); assertThat(response.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1")); }); @@ -749,7 +749,7 @@ public void testGetFieldsComplexField() throws Exception { String field = "field1.field2.field3.field4"; assertResponse(prepareSearch("my-index").addStoredField(field), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(response.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); assertThat(response.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2")); @@ -866,7 +866,7 @@ public void testDocValueFields() throws Exception { builder.addDocValueField("*_field"); } assertResponse(builder, 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -906,7 +906,7 @@ public void testDocValueFields() throws Exception { assertThat(response.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -955,7 +955,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("double_field", "#.0") .addDocValueField("date_field", "epoch_millis"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 36e75435bb5de..76384253282de 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -250,7 +250,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, 
equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -276,7 +276,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -300,7 +300,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -373,7 +373,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -386,7 +386,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); 
assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -405,7 +405,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("2")); assertThat(sh.getAt(1).getId(), equalTo("1")); } @@ -461,7 +461,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -481,7 +481,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); } @@ -528,7 +528,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -546,7 +546,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -564,7 +564,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - 
assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0 + 0.5, 1.e-5)); logger.info( @@ -588,7 +588,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo((2.0 + 0.5) / 2, 1.e-5)); } @@ -606,7 +606,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -624,7 +624,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0, 1.e-5)); } @@ -1131,7 +1131,7 @@ public void testMultiFieldOptions() throws Exception { assertResponse(client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))), response -> { assertSearchHits(response, "1", "2"); SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); }); List lonlat = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 
7fb06c0b83015..a85d133450bec 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -144,7 +144,7 @@ public void testExplainScript() throws InterruptedException, IOException, Execut ), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(20L)); + assertThat(hits.getTotalHits().value(), equalTo(20L)); int idCounter = 19; for (SearchHit hit : hits.getHits()) { assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index a0fe7e661020d..a38c9dc916056 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -145,9 +145,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -167,9 +167,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -224,9 +224,9 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept protected 
void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) { assertNoFailures(searchResponse); - assertThat((int) searchResponse.getHits().getTotalHits().value, is(numMatchingDocs)); + assertThat((int) searchResponse.getHits().getTotalHits().value(), is(numMatchingDocs)); int pos = 0; - for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value; hitId--) { + for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value(); hitId--) { assertThat(searchResponse.getHits().getAt(pos).getId(), equalTo(Integer.toString(hitId))); pos++; } @@ -242,7 +242,7 @@ public void testWithEmptyFunctions() throws IOException, ExecutionException, Int assertNoFailuresAndResponse( client().search(new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text")))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); termQueryScore[0] = response.getHits().getAt(0).getScore(); } ); @@ -259,7 +259,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore)); } ); @@ -269,7 +269,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(2f)) ) ), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 6043688b7670a..9fed4ead8c248 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -149,7 +149,7 @@ public void testRescorePhrase() throws Exception { 5 ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -429,7 +429,7 @@ private static void assertEquivalent(String query, SearchResponse plain, SearchR assertNoFailures(rescored); SearchHits leftHits = plain.getHits(); SearchHits rightHits = rescored.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); @@ -855,7 +855,7 @@ public void testRescorePhaseWithInvalidSort() throws Exception { .setTrackScores(true) .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); assertThat(response.getHits().getHits().length, equalTo(5)); for (SearchHit hit : response.getHits().getHits()) { assertThat(hit.getScore(), equalTo(101f)); @@ -902,7 +902,7 @@ public void testRescoreAfterCollapse() throws Exception { .addRescorer(new 
QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore"))) .setCollapse(new CollapseBuilder("group")); assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); assertThat(resp.getHits().getHits().length, equalTo(3)); SearchHit hit1 = resp.getHits().getAt(0); @@ -982,7 +982,7 @@ public void testRescoreAfterCollapseRandom() throws Exception { .setSize(Math.min(numGroups, 10)); long expectedNumHits = numHits; assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); for (int pos = 0; pos < resp.getHits().getHits().length; pos++) { SearchHit hit = resp.getHits().getAt(pos); assertThat(hit.getId(), equalTo(sortedGroups[pos].id())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 7fdb31a468998..22e27d78531a6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -268,7 +268,7 @@ public void testSeedReportedInExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); SearchHit firstHit = response.getHits().getAt(0); assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); } @@ -283,12 +283,12 @@ public void testNoDocs() throws Exception { prepareSearch("test").setQuery( functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) ), - response -> assertEquals(0, 
response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); assertNoFailuresAndResponse( prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())), - response -> assertEquals(0, response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 9b574cb54a116..2fde645f0036b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -426,7 +426,7 @@ public void testExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); Explanation explanation = response.getHits().getHits()[0].getExplanation(); assertThat(explanation.getValue(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(explanation.toString(), startsWith("0.36464313 = Score based on 2 child docs in range from 0 to 1")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 6993f24b895e0..e6cd89c09b979 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -147,10 +147,10 @@ public void testProfileMatchesRegular() throws Exception { ); } - if (vanillaResponse.getHits().getTotalHits().value != profileResponse.getHits().getTotalHits().value) { + if 
(vanillaResponse.getHits().getTotalHits().value() != profileResponse.getHits().getTotalHits().value()) { Set vanillaSet = new HashSet<>(Arrays.asList(vanillaResponse.getHits().getHits())); Set profileSet = new HashSet<>(Arrays.asList(profileResponse.getHits().getHits())); - if (vanillaResponse.getHits().getTotalHits().value > profileResponse.getHits().getTotalHits().value) { + if (vanillaResponse.getHits().getTotalHits().value() > profileResponse.getHits().getTotalHits().value()) { vanillaSet.removeAll(profileSet); fail("Vanilla hits were larger than profile hits. Non-overlapping elements were: " + vanillaSet.toString()); } else { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java index f263ececfdc7d..26b040e2309c2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java @@ -133,7 +133,7 @@ public void testExists() throws Exception { response ), count, - response.getHits().getTotalHits().value + response.getHits().getTotalHits().value() ); } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 96042e198ef43..0fd2bd6f94770 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -347,7 +347,7 @@ public void testPhraseType() { ).type(MatchQueryParser.Type.PHRASE) ) ), - response -> assertThat(response.getHits().getTotalHits().value, greaterThan(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), greaterThan(1L)) ); 
assertSearchHitsWithoutFailures( @@ -428,8 +428,8 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio matchResp -> { assertThat( "field: " + field + " query: " + builder.toString(), - multiMatchResp.getHits().getTotalHits().value, - equalTo(matchResp.getHits().getTotalHits().value) + multiMatchResp.getHits().getTotalHits().value(), + equalTo(matchResp.getHits().getTotalHits().value()) ); SearchHits hits = multiMatchResp.getHits(); if (field.startsWith("missing")) { @@ -451,7 +451,7 @@ public void testEquivalence() { var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); final int numDocs; try { - numDocs = (int) response.getHits().getTotalHits().value; + numDocs = (int) response.getHits().getTotalHits().value(); } finally { response.decRef(); } @@ -944,7 +944,7 @@ private static void assertEquivalent(String query, SearchResponse left, SearchRe assertNoFailures(right); SearchHits leftHits = left.getHits(); SearchHits rightHits = right.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index e25e330e072a6..c8fe9498b156f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -263,7 +263,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { } private void assertHits(SearchHits hits, String... 
ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 45b98686e0484..cffba49d5941c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -10,7 +10,7 @@ package org.elasticsearch.search.query; import org.apache.lucene.analysis.pattern.PatternReplaceCharFilter; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.tests.analysis.MockTokenizer; @@ -264,7 +264,7 @@ public void testConstantScoreQuery() throws Exception { MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num))); final long[] constantScoreTotalHits = new long[1]; assertResponse(prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num), response -> { - constantScoreTotalHits[0] = response.getHits().getTotalHits().value; + constantScoreTotalHits[0] = response.getHits().getTotalHits().value(); SearchHits hits = response.getHits(); for (SearchHit searchHit : hits) { assertThat(searchHit, hasScore(1.0f)); @@ -277,7 +277,7 @@ public void testConstantScoreQuery() throws Exception { ).setSize(num), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(constantScoreTotalHits[0])); + assertThat(hits.getTotalHits().value(), equalTo(constantScoreTotalHits[0])); if (constantScoreTotalHits[0] > 1) { float expected = hits.getAt(0).getScore(); for (SearchHit 
searchHit : hits) { @@ -1693,7 +1693,7 @@ public void testQueryStringParserCache() throws Exception { assertResponse( prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); first[0] = response.getHits().getAt(0).getScore(); } @@ -1704,7 +1704,7 @@ public void testQueryStringParserCache() throws Exception { prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); float actual = response.getHits().getAt(0).getScore(); assertThat(finalI + " expected: " + first[0] + " actual: " + actual, Float.compare(first[0], actual), equalTo(0)); @@ -1917,7 +1917,9 @@ public Map> getTokenizers() { } /** - * Test correct handling {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexReader, MultiTermQuery)}. That rewrite method is e.g. + * Test correct handling + * {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexSearcher, MultiTermQuery)}. + * That rewrite method is e.g. * set for fuzzy queries with "constant_score" rewrite nested inside a `span_multi` query and would cause NPEs due to an unset * {@link AttributeSource}. 
*/ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 35f11eb1429b4..522c20b687caa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -609,7 +609,7 @@ public void testSimpleQueryStringWithAnalysisStopWords() throws Exception { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java index 13a7d1fa59496..97aa428822fae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java @@ -75,7 +75,7 @@ public void testSimpleSearch() throws ExecutionException, InterruptedException { assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.RUNNING), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.FAILED), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(testClusterInfo.get("total_docs"))); + assertThat(response.getHits().getTotalHits().value(), equalTo(testClusterInfo.get("total_docs"))); }); } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java index 43197b77b2c1e..25b43a2dc946e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java @@ -78,8 +78,8 @@ public void testRewrite() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_0")); }); } @@ -91,8 +91,8 @@ public void testRewriteCompound() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 35990fa3755b1..9a7ce2c5c28ab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -123,17 +123,17 @@ public void testSimplePreference() { assertResponse( prepareSearch().setQuery(matchAllQuery()), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("_local"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("1234"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 33b554a508e2b..06ce330213af8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -51,15 +51,15 @@ public void testNodeSelection() { // Before we've gathered stats for all nodes, we should try each node once. 
Set nodeIds = new HashSet<>(); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertEquals(3, nodeIds.size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 2c96c27a0d12d..f59be6bb75928 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -122,7 +122,7 @@ public void testCustomScriptBinaryField() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sbinaryData").getValues().get(0), equalTo(16)); } @@ -175,7 +175,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, 
CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); @@ -196,7 +196,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); } @@ -214,7 +214,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index d3da4639a3927..ac5738a9b67b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -44,7 +44,7 @@ public void testDuelQueryThenFetch() throws Exception { 
prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), control -> { SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(sh.getHits().length, equalTo(context.numDocs)); SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) @@ -55,7 +55,7 @@ public void testDuelQueryThenFetch() throws Exception { try { assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); int counter = 0; @@ -69,7 +69,7 @@ public void testDuelQueryThenFetch() throws Exception { searchScrollResponse.decRef(); searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(10)).get(); assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); if (searchScrollResponse.getHits().getHits().length == 0) { break; } @@ -241,7 +241,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int try { while (true) { assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getTotalHits().value(), scroll.getHits().getTotalHits().value()); assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); if (scroll.getHits().getHits().length == 0) { break; @@ -255,7 +255,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int 
scroll.decRef(); scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll(TimeValue.timeValueMinutes(10)).get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); + assertEquals(control.getHits().getTotalHits().value(), scrollDocs); } catch (AssertionError e) { logger.info("Control:\n{}", control); logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 7c3dde22ce9d0..7ac24b77a4b6d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -89,7 +89,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -98,7 +98,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -107,7 +107,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -145,7 +145,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -155,7 +155,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -166,7 +166,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, 
equalTo(1)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -176,7 +176,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(0)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -262,7 +262,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -278,7 +278,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -289,7 +289,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - 
assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -300,7 +300,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -381,7 +381,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -397,7 +397,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -408,7 +408,7 @@ public void 
testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -419,7 +419,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -535,7 +535,7 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), searchResponse -> { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -601,7 +601,7 @@ public void testInvalidScrollKeepAlive() throws IOException { assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { assertNotNull(searchResponse.getScrollId()); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); Exception ex = expectThrows( Exception.class, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 7c459f91a1ac0..353858e9d6974 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -150,7 +150,7 @@ public void testWithNullStrings() throws InterruptedException { .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0, null }), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index a62a042a3cab5..e87c4790aa665 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -555,7 +555,7 @@ public void testStrictlyCountRequest() throws Exception { assertNoFailuresAndResponse( prepareSearch("test_count_1", "test_count_2").setTrackTotalHits(true).setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(11L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(11L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 979cb9e8a8c4c..e079994003751 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -117,7 +117,7 @@ public void testWithPreferenceAndRoutings() throws Exception { setupIndex(totalDocs, numShards); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -129,7 +129,7 @@ public void testWithPreferenceAndRoutings() throws Exception { }); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -147,7 +147,7 @@ public void testWithPreferenceAndRoutings() throws Exception { .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) ); assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) 
sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()) @@ -166,7 +166,7 @@ private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String f SearchResponse searchResponse = request.slice(sliceBuilder).get(); try { totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); int numSliceResults = searchResponse.getHits().getHits().length; String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { @@ -238,7 +238,7 @@ private void assertSearchSlicesWithPointInTime( SearchResponse searchResponse = request.get(); try { - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); while (true) { int numHits = searchResponse.getHits().getHits().length; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 3be427e37d60c..d1841ebaf8071 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -286,7 +286,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut assertNoFailuresAndResponse( prepareSearch("test").setQuery(matchAllQuery()).setSize(size).addSort("dense_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); 
assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = denseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -307,7 +307,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut .setSize(size) .addSort("sparse_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) sparseBytes.size())); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) sparseBytes.size())); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = sparseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -818,7 +818,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -828,7 +828,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -838,7 +838,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -884,7 +884,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -896,7 +896,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -908,7 +908,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -920,7 +920,7 @@ public void testSortMissingStrings() throws IOException { response -> { 
assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -1183,7 +1183,7 @@ public void testSortMVField() throws Exception { refresh(); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1197,7 +1197,7 @@ public void testSortMVField() throws Exception { }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1214,7 +1214,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1232,7 +1232,7 @@ public void testSortMVField() throws Exception { .setSize(10) 
.addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1250,7 +1250,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1264,7 +1264,7 @@ public void testSortMVField() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1277,7 +1277,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1290,7 +1290,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) 
response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1303,7 +1303,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1316,7 +1316,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1329,7 +1329,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1342,7 +1342,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1355,7 +1355,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1368,7 +1368,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1381,7 +1381,7 @@ public void testSortMVField() throws Exception { 
assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1394,7 +1394,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1407,7 +1407,7 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("07")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1719,8 +1719,8 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order), singleShardResponse -> { assertThat( - multiShardResponse.getHits().getTotalHits().value, - equalTo(singleShardResponse.getHits().getTotalHits().value) + 
multiShardResponse.getHits().getTotalHits().value(), + equalTo(singleShardResponse.getHits().getTotalHits().value()) ); assertThat(multiShardResponse.getHits().getHits().length, equalTo(singleShardResponse.getHits().getHits().length)); for (int i = 0; i < multiShardResponse.getHits().getHits().length; i++) { @@ -1747,14 +1747,14 @@ public void testCustomFormat() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").addSort(SortBuilders.fieldSort("ip")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues()); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues()); }); assertNoFailuresAndResponse( prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java index ae0d2cbeb841f..fc5d40ae18c14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -362,7 +362,7 @@ public void testDocumentsWithNullValue() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -373,7 +373,7 @@ public void testDocumentsWithNullValue() throws Exception { .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) .addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -391,7 +391,7 @@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -409,7 +409,7 @@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("2")); } ); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 6351d8d906389..ec9c680e17fc3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -64,12 +64,12 @@ public void testInnerHits() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java index 2952150c2cb22..f90056c6ae859 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java @@ -54,10 +54,10 @@ public void testCustomBM25Similarity() throws Exception { .get(); assertResponse(prepareSearch().setQuery(matchQuery("field1", "quick brown fox")), bm25SearchResponse -> { - assertThat(bm25SearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(bm25SearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float bm25Score = bm25SearchResponse.getHits().getHits()[0].getScore(); 
assertResponse(prepareSearch().setQuery(matchQuery("field2", "quick brown fox")), booleanSearchResponse -> { - assertThat(booleanSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(booleanSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float defaultScore = booleanSearchResponse.getHits().getHits()[0].getScore(); assertThat(bm25Score, not(equalTo(defaultScore))); }); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 414a6c6ba66a6..89fc5f676cb1e 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -458,7 +458,8 @@ provides org.apache.lucene.codecs.Codec with org.elasticsearch.index.codec.Elasticsearch814Codec, - org.elasticsearch.index.codec.Elasticsearch816Codec; + org.elasticsearch.index.codec.Elasticsearch816Codec, + org.elasticsearch.index.codec.Elasticsearch900Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 666708ea6ffde..e668624440351 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PointValues; @@ -273,7 +274,7 @@ void analyzeDocValues(SegmentReader reader, IndexDiskUsageStats stats) throws IO } case SORTED_SET -> { SortedSetDocValues 
sortedSet = iterateDocValues(maxDocs, () -> docValuesReader.getSortedSet(field), dv -> { - while (dv.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + for (int i = 0; i < dv.docValueCount(); i++) { cancellationChecker.logEvent(); } }); @@ -544,13 +545,14 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I if (field.getVectorDimension() > 0) { switch (field.getVectorEncoding()) { case BYTE -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file ByteVectorValues vectorValues = vectorReader.getByteVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -558,22 +560,23 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } case FLOAT32 -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file FloatVectorValues vectorValues = vectorReader.getFloatVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -581,11 +584,11 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java index 8ac2033e2ff19..dda589a458f88 100644 --- a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java @@ -54,7 +54,7 @@ SearchSortValuesAndFormats getBottomSortValues() { } synchronized void consumeTopDocs(TopFieldDocs topDocs, DocValueFormat[] sortValuesFormat) { - totalHits += topDocs.totalHits.value; + totalHits += topDocs.totalHits.value(); if (validateShardSortFields(topDocs.fields) == false) { return; } diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java index d41a2561646b8..b52d76aac4132 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -57,8 +57,8 @@ public void consumeResult(SearchPhaseResult result, Runnable next) { return; } // set the relation to the first non-equal relation - relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, 
result.queryResult().getTotalHits().relation); - totalHits.add(result.queryResult().getTotalHits().value); + relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, result.queryResult().getTotalHits().relation()); + totalHits.add(result.queryResult().getTotalHits().value()); terminatedEarly.compareAndSet(false, (result.queryResult().terminatedEarly() != null && result.queryResult().terminatedEarly())); timedOut.compareAndSet(false, result.queryResult().searchTimedOut()); next.run(); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 74786dff1648d..ca9c4ab44c423 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -892,8 +892,8 @@ TotalHits getTotalHits() { void add(TopDocsAndMaxScore topDocs, boolean timedOut, Boolean terminatedEarly) { if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { - totalHits += topDocs.topDocs.totalHits.value; - if (topDocs.topDocs.totalHits.relation == Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += topDocs.topDocs.totalHits.value(); + if (topDocs.topDocs.totalHits.relation() == Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 021ad8127a2d0..6a881163914e4 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -701,7 +701,7 @@ String jvmVendor() { } String javaVersion() { - return Constants.JAVA_VERSION; + return Runtime.version().toString(); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java 
b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 5043508c781f0..a57b8b4d23cdb 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -88,7 +88,7 @@ import java.util.Objects; public class Lucene { - public static final String LATEST_CODEC = "Lucene912"; + public static final String LATEST_CODEC = "Lucene100"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -392,8 +392,8 @@ public static ScoreDoc readScoreDoc(StreamInput in) throws IOException { private static final Class GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass(); public static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws IOException { - out.writeVLong(totalHits.value); - out.writeEnum(totalHits.relation); + out.writeVLong(totalHits.value()); + out.writeEnum(totalHits.relation()); } public static void writeTopDocs(StreamOutput out, TopDocsAndMaxScore topDocs) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 625438ebdff97..cbceef120b877 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.core.Nullable; import java.io.IOException; @@ -177,6 +178,11 @@ public boolean seekExact(BytesRef text) throws IOException { } } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) { + return () -> this.seekExact(bytesRef); + } + @Override public int docFreq() throws IOException { return 
currentDocFreq; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java index 5bc52253939af..9460aba0a99cb 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java @@ -14,7 +14,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -38,8 +37,6 @@ public static Automaton caseInsensitivePrefix(String s) { Automaton a = Operations.concatenate(list); // since all elements in the list should be deterministic already, the concatenation also is, so no need to determinized assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); - assert a.isDeterministic(); return a; } @@ -100,7 +97,7 @@ public static Automaton toCaseInsensitiveWildcardAutomaton(Term wildcardquery) { i += length; } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } protected static Automaton toCaseInsensitiveString(BytesRef br) { @@ -117,7 +114,6 @@ public static Automaton toCaseInsensitiveString(String s) { Automaton a = Operations.concatenate(list); // concatenating deterministic automata should result in a deterministic automaton. No need to determinize here. 
assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); return a; } @@ -132,7 +128,6 @@ public static Automaton toCaseInsensitiveChar(int codepoint) { if (altCase != codepoint) { result = Operations.union(case1, Automata.makeChar(altCase)); // this automaton should always be deterministic, no need to determinize - result = MinimizationOperations.minimize(result, 0); assert result.isDeterministic(); } else { result = case1; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java index b6f102a98203f..65688b69f5aa0 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java @@ -20,12 +20,12 @@ public CaseInsensitivePrefixQuery(Term term) { super(term, caseInsensitivePrefix(term.text())); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary) { + super(term, caseInsensitivePrefix(term.text()), isBinary); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { + super(term, caseInsensitivePrefix(term.text()), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java index 91700e5ffe6c1..6368acf383120 100644 --- 
a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java @@ -26,8 +26,8 @@ public CaseInsensitiveWildcardQuery(Term term) { super(term, toCaseInsensitiveWildcardAutomaton(term)); } - public CaseInsensitiveWildcardQuery(Term term, int determinizeWorkLimit, boolean isBinary, RewriteMethod rewriteMethod) { - super(term, toCaseInsensitiveWildcardAutomaton(term), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitiveWildcardQuery(Term term, boolean isBinary, RewriteMethod rewriteMethod) { + super(term, toCaseInsensitiveWildcardAutomaton(term), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 25fa926ada2c8..e2ac58caccd57 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -123,7 +123,7 @@ public static Query applyMinimumShouldMatch(BooleanQuery query, @Nullable String } int optionalClauses = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 13fae303909f5..299739fc3ba8a 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -19,7 +19,7 @@ import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanQuery; import 
org.apache.lucene.queries.spans.SpanTermQuery; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.AttributeSource; @@ -42,7 +42,7 @@ public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap private final boolean hardLimit; public SpanBooleanQueryRewriteWithMaxClause() { - this(BooleanQuery.getMaxClauseCount(), true); + this(IndexSearcher.getMaxClauseCount(), true); } public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) { @@ -59,10 +59,11 @@ public boolean isHardLimit() { } @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public SpanQuery rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { @Override - public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public Query rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { + IndexReader reader = indexSearcher.getIndexReader(); Collection queries = collectTerms(reader, query); if (queries.size() == 0) { return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); @@ -99,7 +100,7 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu + query.toString() + " ] " + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " - + BooleanQuery.getMaxClauseCount() + + IndexSearcher.getMaxClauseCount() + "]" ); } else { @@ -112,6 +113,6 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu return queries; } }; - return (SpanQuery) delegate.rewrite(reader, query); + return (SpanQuery) delegate.rewrite(indexSearcher, query); } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java 
b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index f8d0c81466dcc..54cd4c9946f62 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -207,7 +208,7 @@ public final class XMoreLikeThis { /** * Return a Query with no more than this many terms. * - * @see BooleanQuery#getMaxClauseCount + * @see IndexSearcher#getMaxClauseCount * @see #setMaxQueryTerms */ public static final int DEFAULT_MAX_QUERY_TERMS = 25; @@ -468,7 +469,7 @@ private void addToQuery(PriorityQueue q, BooleanQuery.Builder query) try { query.add(tq, BooleanClause.Occur.SHOULD); - } catch (BooleanQuery.TooManyClauses ignore) { + } catch (IndexSearcher.TooManyClauses ignore) { break; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index ff82160be0325..5a0c216c4e717 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -272,44 +272,65 @@ class CustomBoostFactorWeight extends Weight { this.needsScores = needsScores; } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + private ScorerSupplier functionScorerSupplier(LeafReaderContext context) throws IOException { + 
ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - final long leadCost = subQueryScorer.iterator().cost(); - final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; - final Bits[] docSets = new Bits[functions.length]; - for (int i = 0; i < functions.length; i++) { - ScoreFunction function = functions[i]; - leafFunctions[i] = function.getLeafScoreFunction(context); - if (filterWeights[i] != null) { - ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); - docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); - } else { - docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; + final Bits[] docSets = new Bits[functions.length]; + for (int i = 0; i < functions.length; i++) { + ScoreFunction function = functions[i]; + leafFunctions[i] = function.getLeafScoreFunction(context); + if (filterWeights[i] != null) { + ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); + docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); + } else { + docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + } + } + return new FunctionFactorScorer( + subQueryScorer, + scoreMode, + functions, + maxBoost, + leafFunctions, + docSets, + combineFunction, + needsScores + ); } - } - return new FunctionFactorScorer( - this, - subQueryScorer, - scoreMode, - functions, - maxBoost, - leafFunctions, - docSets, - combineFunction, - needsScores - ); + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } @Override - public Scorer scorer(LeafReaderContext 
context) throws IOException { - Scorer scorer = functionScorer(context); - if (scorer != null && minScore != null) { - scorer = new MinScoreScorer(this, scorer, minScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier scorerSupplier = functionScorerSupplier(context); + + if (scorerSupplier == null || minScore == null) { + return scorerSupplier; } - return scorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(scorerSupplier.get(leadCost), minScore); + } + + @Override + public long cost() { + return scorerSupplier.cost(); + } + }; } @Override @@ -356,7 +377,8 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } else if (singleFunction && functionsExplanations.size() == 1) { factorExplanation = functionsExplanations.get(0); } else { - FunctionFactorScorer scorer = functionScorer(context); + + FunctionFactorScorer scorer = (FunctionFactorScorer) functionScorerSupplier(context).get(1L); int actualDoc = scorer.iterator().advance(doc); assert (actualDoc == doc); double score = scorer.computeScore(doc, expl.getValue().floatValue()); @@ -391,7 +413,6 @@ static class FunctionFactorScorer extends FilterScorer { private final boolean needsScores; private FunctionFactorScorer( - CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, ScoreFunction[] functions, @@ -401,7 +422,7 @@ private FunctionFactorScorer( CombineFunction scoreCombiner, boolean needsScores ) throws IOException { - super(scorer, w); + super(scorer); this.scoreMode = scoreMode; this.functions = functions; this.leafFunctions = leafFunctions; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java index 3d23f66b09d82..0fd46447b3ea9 100644 --- 
a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import java.io.IOException; @@ -25,12 +24,11 @@ public final class MinScoreScorer extends Scorer { private float curScore; private final float boost; - public MinScoreScorer(Weight weight, Scorer scorer, float minScore) { - this(weight, scorer, minScore, 1f); + public MinScoreScorer(Scorer scorer, float minScore) { + this(scorer, minScore, 1f); } - public MinScoreScorer(Weight weight, Scorer scorer, float minScore, float boost) { - super(weight); + public MinScoreScorer(Scorer scorer, float minScore, float boost) { this.in = scorer; this.minScore = minScore; this.boost = boost; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 4222b5dff98ab..d38243f5348c4 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -27,14 +27,8 @@ public class ScriptScoreFunction extends ScoreFunction { static final class CannedScorer extends Scorable { - protected int docid; protected float score; - @Override - public int docID() { - return docid; - } - @Override public float score() { return score; @@ -70,14 +64,13 @@ public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOEx if (script.needs_termStats()) { assert termStatsFactory != null; - leafScript._setTermStats(termStatsFactory.apply(ctx, scorer::docID)); + leafScript._setTermStats(termStatsFactory.apply(ctx, 
leafScript::docId)); } return new LeafScoreFunction() { private double score(int docId, float subQueryScore, ScoreScript.ExplanationHolder holder) throws IOException { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore; double result = leafScript.execute(holder); @@ -97,7 +90,6 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE Explanation exp; if (leafScript instanceof ExplainableScoreScript) { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); } else { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java index 5e3f8e8e62714..e58b2fffed001 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -38,6 +39,7 @@ import java.util.HashSet; import java.util.Objects; import java.util.Set; +import java.util.function.IntSupplier; /** * A query that uses a script to compute documents' scores. 
@@ -104,30 +106,40 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new Weight(this) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - if (minScore == null) { - final BulkScorer subQueryBulkScorer = subQueryWeight.bulkScorer(context); - if (subQueryBulkScorer == null) { - return null; - } - return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); - } else { - return super.bulkScorer(context); - } - } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - Scorer scriptScorer = new ScriptScorer(this, makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); - if (minScore != null) { - scriptScorer = new MinScoreScorer(this, scriptScorer, minScore); - } - return scriptScorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + Scorer scriptScorer = new ScriptScorer(makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); + if (minScore != null) { + scriptScorer = new MinScoreScorer(scriptScorer, minScore); + } + return scriptScorer; + } + + @Override + public BulkScorer bulkScorer() throws IOException { + if (minScore == null) { + final BulkScorer subQueryBulkScorer = subQueryScorerSupplier.bulkScorer(); + return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); + } else { + return super.bulkScorer(); + } + } + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } 
@Override @@ -138,7 +150,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } ExplanationHolder explanationHolder = new ExplanationHolder(); Scorer scorer = new ScriptScorer( - this, makeScoreScript(context), subQueryWeight.scorer(context), subQueryScoreMode, @@ -231,14 +242,12 @@ private static class ScriptScorer extends Scorer { private final ExplanationHolder explanation; ScriptScorer( - Weight weight, ScoreScript scoreScript, Scorer subQueryScorer, ScoreMode subQueryScoreMode, float boost, ExplanationHolder explanation ) { - super(weight); this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); @@ -292,19 +301,27 @@ private static class ScriptScorable extends Scorable { private final ScoreScript scoreScript; private final Scorable subQueryScorer; private final float boost; + private final IntSupplier docIDSupplier; - ScriptScorable(ScoreScript scoreScript, Scorable subQueryScorer, ScoreMode subQueryScoreMode, float boost) { + ScriptScorable( + ScoreScript scoreScript, + Scorable subQueryScorer, + ScoreMode subQueryScoreMode, + float boost, + IntSupplier docIDSupplier + ) { this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); } this.subQueryScorer = subQueryScorer; this.boost = boost; + this.docIDSupplier = docIDSupplier; } @Override public float score() throws IOException { - int docId = docID(); + int docId = docIDSupplier.getAsInt(); scoreScript.setDocument(docId); float score = (float) scoreScript.execute(null); if (score < 0f || Float.isNaN(score)) { @@ -320,10 +337,6 @@ public float score() throws IOException { return score * boost; } - @Override - public int docID() { - return subQueryScorer.docID(); - } } /** @@ -350,9 +363,18 @@ public int score(LeafCollector collector, Bits acceptDocs, int min, int max) thr private LeafCollector wrapCollector(LeafCollector collector) { return new 
FilterLeafCollector(collector) { + + private int docID; + @Override public void setScorer(Scorable scorer) throws IOException { - in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost)); + in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost, () -> docID)); + } + + @Override + public void collect(int doc) throws IOException { + this.docID = doc; + super.collect(doc); } }; } diff --git a/server/src/main/java/org/elasticsearch/common/regex/Regex.java b/server/src/main/java/org/elasticsearch/common/regex/Regex.java index d5b2e8497fc0b..aaaab78b71736 100644 --- a/server/src/main/java/org/elasticsearch/common/regex/Regex.java +++ b/server/src/main/java/org/elasticsearch/common/regex/Regex.java @@ -69,7 +69,7 @@ public static Automaton simpleMatchToAutomaton(String pattern) { previous = i + 1; } automata.add(Automata.makeString(pattern.substring(previous))); - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** @@ -113,7 +113,7 @@ public static Automaton simpleMatchToAutomaton(String... 
patterns) { prefixAutomaton.add(Automata.makeAnyString()); automata.add(Operations.concatenate(prefixAutomaton)); } - return Operations.union(automata); + return Operations.determinize(Operations.union(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 232ce34b153ab..defaddb25eb47 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -254,7 +254,7 @@ public static KeyStoreWrapper load(Path configDir) throws IOException { } Directory directory = new NIOFSDirectory(configDir); - try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME)) { final int formatVersion; try { formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, CURRENT_VERSION); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 2ef96123e63d8..c4b03c712c272 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -280,8 +280,8 @@ public static Function, Map> filter(String[] include = matchAllAutomaton; } else { Automaton includeA = Regex.simpleMatchToAutomaton(includes); - includeA = makeMatchDotsInFieldNames(includeA); - include = new CharacterRunAutomaton(includeA, MAX_DETERMINIZED_STATES); + includeA = Operations.determinize(makeMatchDotsInFieldNames(includeA), MAX_DETERMINIZED_STATES); + include = new CharacterRunAutomaton(includeA); } Automaton excludeA; @@ -289,9 +289,9 @@ 
public static Function, Map> filter(String[] excludeA = Automata.makeEmpty(); } else { excludeA = Regex.simpleMatchToAutomaton(excludes); - excludeA = makeMatchDotsInFieldNames(excludeA); + excludeA = Operations.determinize(makeMatchDotsInFieldNames(excludeA), MAX_DETERMINIZED_STATES); } - CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA, MAX_DETERMINIZED_STATES); + CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA); // NOTE: We cannot use Operations.minus because of the special case that // we want all sub properties to match as soon as an object matches diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 749946e05b745..0c6cf2c8a0761 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.DocIdSetIterator; @@ -449,7 +450,7 @@ OnDiskState loadBestOnDiskState(boolean checkClean) throws IOException { // resources during test execution checkIndex.setThreadCount(1); checkIndex.setInfoStream(printStream); - checkIndex.setChecksumsOnly(true); + checkIndex.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); isClean = checkIndex.checkIndex().clean; } @@ -705,10 +706,11 @@ private static void consumeFromType( final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + final StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { logger.trace("processing doc {}", docIdSetIterator.docID()); - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final BytesArray documentData = new BytesArray(document.getBinaryValue(DATA_FIELD_NAME)); if (document.getField(PAGE_FIELD_NAME) == null) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 7eed5f2b7759d..4ff7ef60cc0a2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -18,7 +18,6 @@ import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.Client; @@ -451,7 +450,7 @@ public boolean match(String setting) { } public static Type defaultStoreType(final boolean allowMmap) { - if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (allowMmap && Constants.JRE_IS_64BIT) { return Type.HYBRIDFS; } else { return Type.NIOFS; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7e04a64e74cb5..efb1facc79b3a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -15,6 +15,7 @@ import 
org.elasticsearch.core.UpdateForV9; import java.lang.reflect.Field; +import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -48,29 +49,38 @@ private static IndexVersion def(int id, Version luceneVersion) { return new IndexVersion(id, luceneVersion); } + // TODO: this is just a hack to allow to keep the V7 IndexVersion constants, during compilation. Remove + private static Version parseUnchecked(String version) { + try { + return Version.parse(version); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_0_0 = def(7_00_00_99, Version.LUCENE_8_0_0); - - public static final IndexVersion V_7_1_0 = def(7_01_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_0 = def(7_02_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_1 = def(7_02_01_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_3_0 = def(7_03_00_99, Version.LUCENE_8_1_0); - public static final IndexVersion V_7_4_0 = def(7_04_00_99, Version.LUCENE_8_2_0); - public static final IndexVersion V_7_5_0 = def(7_05_00_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_5_2 = def(7_05_02_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_6_0 = def(7_06_00_99, Version.LUCENE_8_4_0); - public static final IndexVersion V_7_7_0 = def(7_07_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_8_0 = def(7_08_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_9_0 = def(7_09_00_99, Version.LUCENE_8_6_0); - public static final IndexVersion V_7_10_0 = def(7_10_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_11_0 = def(7_11_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion 
V_7_12_0 = def(7_12_00_99, Version.LUCENE_8_8_0); - public static final IndexVersion V_7_13_0 = def(7_13_00_99, Version.LUCENE_8_8_2); - public static final IndexVersion V_7_14_0 = def(7_14_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_15_0 = def(7_15_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_16_0 = def(7_16_00_99, Version.LUCENE_8_10_1); - public static final IndexVersion V_7_17_0 = def(7_17_00_99, Version.LUCENE_8_11_1); + + public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_1_0 = def(7_01_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); + public static final IndexVersion V_7_4_0 = def(7_04_00_99, parseUnchecked("8.2.0")); + public static final IndexVersion V_7_5_0 = def(7_05_00_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_5_2 = def(7_05_02_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); + public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); + public static final IndexVersion V_7_10_0 = def(7_10_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_11_0 = def(7_11_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); + public static final IndexVersion V_7_13_0 = def(7_13_00_99, parseUnchecked("8.8.2")); + public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion 
V_7_15_0 = def(7_15_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_16_0 = def(7_16_00_99, parseUnchecked("8.10.1")); + public static final IndexVersion V_7_17_0 = def(7_17_00_99, parseUnchecked("8.11.1")); public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); @@ -118,6 +128,9 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_00_0, Version.LUCENE_9_11_1); public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_00_0, Version.LUCENE_9_12_0); public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_00_0, Version.LUCENE_9_12_0); + + public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index 144b99abe5644..c1c392ac07f18 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -46,7 +46,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays 
bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -58,7 +58,7 @@ public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) ); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index 2ba169583b712..00614140e237a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -49,11 +49,12 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm deduplicated[i++] = new FieldInfo( FieldMapper.internFieldName(fi.getName()), fi.number, - fi.hasVectors(), + fi.hasTermVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), + fi.docValuesSkipIndexType(), fi.getDocValuesGen(), internStringStringMap(fi.attributes()), fi.getPointDimensionCount(), diff --git 
a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 27ff19a9d8e40..9f46050f68f99 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java new file mode 100644 index 0000000000000..4154a242c15ed --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 9.0. This extends the Lucene 10.0 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch900Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch900Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch900Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch900Codec.this.getKnnVectorsFormatForField(field); 
+ } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch900Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch900", new Lucene100Codec()); + this.storedFieldsFormat = mode.getFormat(); + this.defaultPostingsFormat = new Lucene912PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field + * . + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index 64c2ca788f63c..bf2c5a9f01e29 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -22,11 +22,11 @@ * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene912Codec { +public final class LegacyPerFieldMapperCodec extends Lucene100Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene912Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene100Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. 
You must create a copy of the current Elasticsearch diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index 83c5cb396d88b..b60b88da5949d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -26,7 +26,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public final class PerFieldMapperCodec extends Elasticsearch816Codec { +public final class PerFieldMapperCodec extends Elasticsearch900Codec { private final PerFieldFormatSupplier formatSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java index d26fb52a82bcd..81129835518da 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -142,12 +141,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = 
state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { CodecUtil.checkIndexHeader( metaIn, BLOOM_CODEC_NAME, diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java index 01d874adec14d..abf68abe51887 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java @@ -38,7 +38,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -291,12 +290,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { Map bloomFilters = null; Throwable priorE = null; long indexFileLength = 0; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 5d79807fe6674..dc73428a07c7c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -15,6 +15,7 @@ import org.apache.lucene.codecs.lucene90.IndexedDISI; import org.apache.lucene.index.BinaryDocValues; import 
org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; @@ -41,9 +42,13 @@ import org.elasticsearch.core.IOUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_LEVEL_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SORTED_SET; final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { @@ -51,9 +56,16 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { IndexOutput data, meta; final int maxDoc; private byte[] termsDictBuffer; - - ES87TSDBDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) - throws IOException { + private final int skipIndexIntervalSize; + + ES87TSDBDocValuesConsumer( + SegmentWriteState state, + int skipIndexIntervalSize, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { this.termsDictBuffer = new byte[1 << 14]; boolean success = false; try { @@ -76,6 +88,7 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { state.segmentSuffix ); maxDoc = state.segmentInfo.maxDoc(); + this.skipIndexIntervalSize = skipIndexIntervalSize; success = true; } finally { if (success == false) { @@ -88,12 +101,17 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.NUMERIC); - 
writeField(field, new EmptyDocValuesProducer() { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { return DocValues.singleton(valuesProducer.getNumeric(field)); } - }, -1); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + + writeField(field, producer, -1); } private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { @@ -263,13 +281,11 @@ public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) th public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.SORTED); - doAddSortedField(field, valuesProducer); + doAddSortedField(field, valuesProducer, false); } - private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - SortedDocValues sorted = valuesProducer.getSorted(field); - int maxOrd = sorted.getValueCount(); - writeField(field, new EmptyDocValuesProducer() { + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer, boolean addTypeByte) throws IOException { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { SortedDocValues sorted = valuesProducer.getSorted(field); @@ -306,7 +322,16 @@ public long cost() { }; return DocValues.singleton(sortedOrds); } - }, maxOrd); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + if (addTypeByte) { + meta.writeByte((byte) 0); // multiValued (0 = singleValued) + } + SortedDocValues sorted = valuesProducer.getSorted(field); + int maxOrd = sorted.getValueCount(); + writeField(field, producer, maxOrd); 
addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); } @@ -459,6 +484,12 @@ public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProdu } private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, valuesProducer); + } + if (maxOrd > -1) { + meta.writeByte((byte) 1); // multiValued (1 = multiValued) + } long[] stats = writeField(field, valuesProducer, maxOrd); int numDocsWithField = Math.toIntExact(stats[0]); long numValues = stats[1]; @@ -510,16 +541,14 @@ public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) meta.writeByte(SORTED_SET); if (isSingleValued(valuesProducer.getSortedSet(field))) { - meta.writeByte((byte) 0); // multiValued (0 = singleValued) doAddSortedField(field, new EmptyDocValuesProducer() { @Override public SortedDocValues getSorted(FieldInfo field) throws IOException { return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); } - }); + }, true); return; } - meta.writeByte((byte) 1); // multiValued (1 = multiValued) SortedSetDocValues values = valuesProducer.getSortedSet(field); long maxOrd = values.getValueCount(); @@ -603,4 +632,157 @@ public void close() throws IOException { meta = data = null; } } + + private static class SkipAccumulator { + int minDocID; + int maxDocID; + int docCount; + long minValue; + long maxValue; + + SkipAccumulator(int docID) { + minDocID = docID; + minValue = Long.MAX_VALUE; + maxValue = Long.MIN_VALUE; + docCount = 0; + } + + boolean isDone(int skipIndexIntervalSize, int valueCount, long nextValue, int nextDoc) { + if (docCount < skipIndexIntervalSize) { + return false; + } + // Once we reach the interval size, we will keep accepting documents if + // - next doc value is not a multi-value + // - current accumulator only contains a single value and next value is 
the same value + // - the accumulator is dense and the next doc keeps the density (no gaps) + return valueCount > 1 || minValue != maxValue || minValue != nextValue || docCount != nextDoc - minDocID; + } + + void accumulate(long value) { + minValue = Math.min(minValue, value); + maxValue = Math.max(maxValue, value); + } + + void accumulate(SkipAccumulator other) { + assert minDocID <= other.minDocID && maxDocID < other.maxDocID; + maxDocID = other.maxDocID; + minValue = Math.min(minValue, other.minValue); + maxValue = Math.max(maxValue, other.maxValue); + docCount += other.docCount; + } + + void nextDoc(int docID) { + maxDocID = docID; + ++docCount; + } + + public static SkipAccumulator merge(List list, int index, int length) { + SkipAccumulator acc = new SkipAccumulator(list.get(index).minDocID); + for (int i = 0; i < length; i++) { + acc.accumulate(list.get(index + i)); + } + return acc; + } + } + + private void writeSkipIndex(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + assert field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE; + final long start = data.getFilePointer(); + final SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + long globalMaxValue = Long.MIN_VALUE; + long globalMinValue = Long.MAX_VALUE; + int globalDocCount = 0; + int maxDocId = -1; + final List accumulators = new ArrayList<>(); + SkipAccumulator accumulator = null; + final int maxAccumulators = 1 << (SKIP_INDEX_LEVEL_SHIFT * (SKIP_INDEX_MAX_LEVEL - 1)); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + final long firstValue = values.nextValue(); + if (accumulator != null && accumulator.isDone(skipIndexIntervalSize, values.docValueCount(), firstValue, doc)) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + accumulator = 
null; + if (accumulators.size() == maxAccumulators) { + writeLevels(accumulators); + accumulators.clear(); + } + } + if (accumulator == null) { + accumulator = new SkipAccumulator(doc); + accumulators.add(accumulator); + } + accumulator.nextDoc(doc); + accumulator.accumulate(firstValue); + for (int i = 1, end = values.docValueCount(); i < end; ++i) { + accumulator.accumulate(values.nextValue()); + } + } + + if (accumulators.isEmpty() == false) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + writeLevels(accumulators); + } + meta.writeLong(start); // record the start in meta + meta.writeLong(data.getFilePointer() - start); // record the length + assert globalDocCount == 0 || globalMaxValue >= globalMinValue; + meta.writeLong(globalMaxValue); + meta.writeLong(globalMinValue); + assert globalDocCount <= maxDocId + 1; + meta.writeInt(globalDocCount); + meta.writeInt(maxDocId); + } + + private void writeLevels(List accumulators) throws IOException { + final List> accumulatorsLevels = new ArrayList<>(SKIP_INDEX_MAX_LEVEL); + accumulatorsLevels.add(accumulators); + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL - 1; i++) { + accumulatorsLevels.add(buildLevel(accumulatorsLevels.get(i))); + } + int totalAccumulators = accumulators.size(); + for (int index = 0; index < totalAccumulators; index++) { + // compute how many levels we need to write for the current accumulator + final int levels = getLevels(index, totalAccumulators); + // write the number of levels + data.writeByte((byte) levels); + // write intervals in reverse order. 
This is done so we don't + // need to read all of them in case of slipping + for (int level = levels - 1; level >= 0; level--) { + final SkipAccumulator accumulator = accumulatorsLevels.get(level).get(index >> (SKIP_INDEX_LEVEL_SHIFT * level)); + data.writeInt(accumulator.maxDocID); + data.writeInt(accumulator.minDocID); + data.writeLong(accumulator.maxValue); + data.writeLong(accumulator.minValue); + data.writeInt(accumulator.docCount); + } + } + } + + private static List buildLevel(List accumulators) { + final int levelSize = 1 << SKIP_INDEX_LEVEL_SHIFT; + final List collector = new ArrayList<>(); + for (int i = 0; i < accumulators.size() - levelSize + 1; i += levelSize) { + collector.add(SkipAccumulator.merge(accumulators, i, levelSize)); + } + return collector; + } + + private static int getLevels(int index, int size) { + if (Integer.numberOfTrailingZeros(index) >= SKIP_INDEX_LEVEL_SHIFT) { + // TODO: can we do it in constant time rather than linearly with SKIP_INDEX_MAX_LEVEL? + final int left = size - index; + for (int level = SKIP_INDEX_MAX_LEVEL - 1; level > 0; level--) { + final int numberIntervals = 1 << (SKIP_INDEX_LEVEL_SHIFT * level); + if (left >= numberIntervals && index % numberIntervals == 0) { + return level + 1; + } + } + } + return 1; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java index 742249892f61f..496c41b42869a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java @@ -43,13 +43,57 @@ public class ES87TSDBDocValuesFormat extends org.apache.lucene.codecs.DocValuesF static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; + // number of documents in an 
interval + private static final int DEFAULT_SKIP_INDEX_INTERVAL_SIZE = 4096; + // bytes on an interval: + // * 1 byte : number of levels + // * 16 bytes: min / max value, + // * 8 bytes: min / max docID + // * 4 bytes: number of documents + private static final long SKIP_INDEX_INTERVAL_BYTES = 29L; + // number of intervals represented as a shift to create a new level, this is 1 << 3 == 8 + // intervals. + static final int SKIP_INDEX_LEVEL_SHIFT = 3; + // max number of levels + // Increasing this number, it increases how much heap we need at index time. + // we currently need (1 * 8 * 8 * 8) = 512 accumulators on heap + static final int SKIP_INDEX_MAX_LEVEL = 4; + // number of bytes to skip when skipping a level. It does not take into account the + // current interval that is being read. + static final long[] SKIP_INDEX_JUMP_LENGTH_PER_LEVEL = new long[SKIP_INDEX_MAX_LEVEL]; + + static { + // Size of the interval minus read bytes (1 byte for level and 4 bytes for maxDocID) + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[0] = SKIP_INDEX_INTERVAL_BYTES - 5L; + for (int level = 1; level < SKIP_INDEX_MAX_LEVEL; level++) { + // jump from previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] = SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level - 1]; + // nodes added by new level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] += (1 << (level * SKIP_INDEX_LEVEL_SHIFT)) * SKIP_INDEX_INTERVAL_BYTES; + // remove the byte levels added in the previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] -= (1 << ((level - 1) * SKIP_INDEX_LEVEL_SHIFT)); + } + } + + private final int skipIndexIntervalSize; + + /** Default constructor. */ public ES87TSDBDocValuesFormat() { + this(DEFAULT_SKIP_INDEX_INTERVAL_SIZE); + } + + /** Doc values fields format with specified skipIndexIntervalSize. 
*/ + public ES87TSDBDocValuesFormat(int skipIndexIntervalSize) { super(CODEC_NAME); + if (skipIndexIntervalSize < 2) { + throw new IllegalArgumentException("skipIndexIntervalSize must be > 1, got [" + skipIndexIntervalSize + "]"); + } + this.skipIndexIntervalSize = skipIndexIntervalSize; } @Override public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - return new ES87TSDBDocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + return new ES87TSDBDocValuesConsumer(state, skipIndexIntervalSize, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index e3f7e829c1d2e..d5c94de1c6942 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -16,6 +16,8 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -27,6 +29,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.DataInput; @@ -43,6 +46,8 @@ import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_JUMP_LENGTH_PER_LEVEL; +import static 
org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT; public class ES87TSDBDocValuesProducer extends DocValuesProducer { @@ -51,6 +56,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { private final Map sorted = new HashMap<>(); private final Map sortedSets = new HashMap<>(); private final Map sortedNumerics = new HashMap<>(); + private final Map skippers = new HashMap<>(); private final IndexInput data; private final int maxDoc; @@ -61,7 +67,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { // read in the entries from the metadata file. int version = -1; - try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) { + try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName)) { Throwable priorE = null; try { @@ -659,9 +665,8 @@ public long nextOrd() throws IOException { i = 0; count = ords.docValueCount(); } - if (i++ == count) { - return NO_MORE_ORDS; - } + assert i < count; + i++; return ords.nextValue(); } @@ -700,6 +705,116 @@ public long cost() { }; } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + final DocValuesSkipperEntry entry = skippers.get(field.name); + + final IndexInput input = data.slice("doc value skipper", entry.offset, entry.length); + // Prefetch the first page of data. Following pages are expected to get prefetched through + // read-ahead. + if (input.length() > 0) { + input.prefetch(0, 1); + } + // TODO: should we write to disk the actual max level for this segment? 
+ return new DocValuesSkipper() { + final int[] minDocID = new int[SKIP_INDEX_MAX_LEVEL]; + final int[] maxDocID = new int[SKIP_INDEX_MAX_LEVEL]; + + { + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = -1; + } + } + + final long[] minValue = new long[SKIP_INDEX_MAX_LEVEL]; + final long[] maxValue = new long[SKIP_INDEX_MAX_LEVEL]; + final int[] docCount = new int[SKIP_INDEX_MAX_LEVEL]; + int levels = 1; + + @Override + public void advance(int target) throws IOException { + if (target > entry.maxDocId) { + // skipper is exhausted + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = DocIdSetIterator.NO_MORE_DOCS; + } + } else { + // find next interval + assert target > maxDocID[0] : "target must be bigger that current interval"; + while (true) { + levels = input.readByte(); + assert levels <= SKIP_INDEX_MAX_LEVEL && levels > 0 : "level out of range [" + levels + "]"; + boolean valid = true; + // check if current interval is competitive or we can jump to the next position + for (int level = levels - 1; level >= 0; level--) { + if ((maxDocID[level] = input.readInt()) < target) { + input.skipBytes(SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level]); // the jump for the level + valid = false; + break; + } + minDocID[level] = input.readInt(); + maxValue[level] = input.readLong(); + minValue[level] = input.readLong(); + docCount[level] = input.readInt(); + } + if (valid) { + // adjust levels + while (levels < SKIP_INDEX_MAX_LEVEL && maxDocID[levels] >= target) { + levels++; + } + break; + } + } + } + } + + @Override + public int numLevels() { + return levels; + } + + @Override + public int minDocID(int level) { + return minDocID[level]; + } + + @Override + public int maxDocID(int level) { + return maxDocID[level]; + } + + @Override + public long minValue(int level) { + return minValue[level]; + } + + @Override + public long maxValue(int level) { + return maxValue[level]; + } + + @Override + public int docCount(int level) { + 
return docCount[level]; + } + + @Override + public long minValue() { + return entry.minValue; + } + + @Override + public long maxValue() { + return entry.maxValue; + } + + @Override + public int docCount() { + return entry.docCount; + } + }; + } + @Override public void checkIntegrity() throws IOException { CodecUtil.checksumEntireFile(data); @@ -717,6 +832,9 @@ private void readFields(IndexInput meta, FieldInfos infos) throws IOException { throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); } byte type = meta.readByte(); + if (info.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + skippers.put(info.name, readDocValueSkipperMeta(meta)); + } if (type == ES87TSDBDocValuesFormat.NUMERIC) { numerics.put(info.name, readNumeric(meta)); } else if (type == ES87TSDBDocValuesFormat.BINARY) { @@ -739,6 +857,17 @@ private static NumericEntry readNumeric(IndexInput meta) throws IOException { return entry; } + private static DocValuesSkipperEntry readDocValueSkipperMeta(IndexInput meta) throws IOException { + long offset = meta.readLong(); + long length = meta.readLong(); + long maxValue = meta.readLong(); + long minValue = meta.readLong(); + int docCount = meta.readInt(); + int maxDocID = meta.readInt(); + + return new DocValuesSkipperEntry(offset, length, minValue, maxValue, docCount, maxDocID); + } + private static void readNumeric(IndexInput meta, NumericEntry entry) throws IOException { entry.docsWithFieldOffset = meta.readLong(); entry.docsWithFieldLength = meta.readLong(); @@ -1249,6 +1378,8 @@ private void set() { } } + private record DocValuesSkipperEntry(long offset, long length, long minValue, long maxValue, int docCount, int maxDocId) {} + private static class NumericEntry { long docsWithFieldOffset; long docsWithFieldLength; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java index 
73dd4273a794e..cf69ab0862949 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java @@ -19,23 +19,52 @@ */ package org.elasticsearch.index.codec.vectors; -import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.search.VectorScorer; +import org.apache.lucene.util.VectorUtil; import java.io.IOException; +import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; + /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */ -public abstract class BinarizedByteVectorValues extends DocIdSetIterator { - - public abstract float[] getCorrectiveTerms(); +public abstract class BinarizedByteVectorValues extends ByteVectorValues { - public abstract byte[] vectorValue() throws IOException; + public abstract float[] getCorrectiveTerms(int vectorOrd) throws IOException; /** Return the dimension of the vectors */ public abstract int dimension(); + /** Returns the centroid distance for the vector */ + public abstract float getCentroidDistance(int vectorOrd) throws IOException; + + /** Returns the vector magnitude for the vector */ + public abstract float getVectorMagnitude(int vectorOrd) throws IOException; + + /** Returns OOQ corrective factor for the given vector ordinal */ + public abstract float getOOQ(int targetOrd) throws IOException; + + /** + * Returns the norm of the target vector w the centroid corrective factor for the given vector + * ordinal + */ + public abstract float getNormOC(int targetOrd) throws IOException; + + /** + * Returns the target vector dot product the centroid corrective factor for the given vector + * ordinal + */ + public abstract float getODotC(int targetOrd) throws IOException; + + /** + * @return the quantizer used to quantize the vectors + */ + public abstract BinaryQuantizer 
getQuantizer(); + + public abstract float[] getCentroid() throws IOException; + /** * Return the number of vectors for this field. * @@ -43,9 +72,16 @@ public abstract class BinarizedByteVectorValues extends DocIdSetIterator { */ public abstract int size(); - @Override - public final long cost() { - return size(); + int discretizedDimensions() { + return BQVectorUtils.discretize(dimension(), 64); + } + + float sqrtDimensions() { + return (float) constSqrt(dimension()); + } + + float maxX1() { + return (float) (1.9 / constSqrt(discretizedDimensions() - 1.0)); } /** @@ -55,4 +91,13 @@ public final long cost() { * @return a {@link VectorScorer} instance or null */ public abstract VectorScorer scorer(float[] query) throws IOException; + + @Override + public abstract BinarizedByteVectorValues copy() throws IOException; + + float getCentroidDP() throws IOException { + // this only gets executed on-merge + float[] centroid = getCentroid(); + return VectorUtil.dotProduct(centroid, centroid); + } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index cc5454ee074e6..ab882c8b04648 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -152,10 +152,5 @@ public void search(String field, byte[] target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index 9491598653c44..662e4040511e2 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -160,11 +160,5 @@ public void search(String field, byte[] target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 10a20839ab3c5..4c4fd00806954 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -22,18 +22,17 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedVectorsReader; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.simdvec.VectorScorerFactory; import org.elasticsearch.simdvec.VectorSimilarityType; @@ -246,9 +245,9 @@ public String toString() { } @Override - public RandomVectorScorerSupplier 
getRandomVectorScorerSupplier(VectorSimilarityFunction sim, RandomAccessVectorValues values) + public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, KnnVectorValues values) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorerSupplier(sim, values); @@ -256,7 +255,7 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity if (factory != null) { var scorer = factory.getInt7SQVectorScorerSupplier( VectorSimilarityType.of(sim), - values.getSlice(), + qValues.getSlice(), qValues, qValues.getScalarQuantizer().getConstantMultiplier() ); @@ -269,9 +268,9 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, float[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, float[] query) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorer(sim, values, query); @@ -287,7 +286,7 @@ public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, Ra } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, byte[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, byte[] query) throws IOException { return 
delegate.getRandomVectorScorer(sim, values, query); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index 7e586e210afd3..18668f4f304b0 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -14,14 +14,15 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; +import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.io.IOException; @@ -68,14 +69,14 @@ public String toString() { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues + KnnVectorValues vectorValues ) throws IOException { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - assert randomAccessVectorValues instanceof 
RandomAccessQuantizedByteVectorValues == false; + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + assert byteVectorValues instanceof QuantizedByteVectorValues == false; return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(randomAccessVectorValuesBytes); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(byteVectorValues); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -84,18 +85,15 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - byte[] bytes - ) { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + KnnVectorValues vectorValues, + byte[] target + ) throws IOException { + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - checkDimensions(bytes.length, randomAccessVectorValuesBytes.dimension()); + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + checkDimensions(target.length, byteVectorValues.dimension()); return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer( - randomAccessVectorValuesBytes, - bytes - ); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer(byteVectorValues, target); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -103,10 +101,10 @@ public RandomVectorScorer getRandomVectorScorer( @Override public RandomVectorScorer getRandomVectorScorer( - VectorSimilarityFunction vectorSimilarityFunction, - 
RandomAccessVectorValues randomAccessVectorValues, - float[] floats - ) { + VectorSimilarityFunction similarityFunction, + KnnVectorValues vectorValues, + float[] target + ) throws IOException { throw new IllegalArgumentException("Unsupported vector type"); } } @@ -117,9 +115,9 @@ static float hammingScore(byte[] a, byte[] b) { static class HammingVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final byte[] query; - private final RandomAccessVectorValues.Bytes byteValues; + private final ByteVectorValues byteValues; - HammingVectorScorer(RandomAccessVectorValues.Bytes byteValues, byte[] query) { + HammingVectorScorer(ByteVectorValues byteValues, byte[] query) { super(byteValues); this.query = query; this.byteValues = byteValues; @@ -132,9 +130,9 @@ public float score(int i) throws IOException { } static class HammingScorerSupplier implements RandomVectorScorerSupplier { - private final RandomAccessVectorValues.Bytes byteValues, byteValues1, byteValues2; + private final ByteVectorValues byteValues, byteValues1, byteValues2; - HammingScorerSupplier(RandomAccessVectorValues.Bytes byteValues) throws IOException { + HammingScorerSupplier(ByteVectorValues byteValues) throws IOException { this.byteValues = byteValues; this.byteValues1 = byteValues.copy(); this.byteValues2 = byteValues.copy(); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java index f4d22edc6dfdb..72c5da4880e75 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java @@ -20,10 +20,10 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; +import org.apache.lucene.index.KnnVectorValues; import 
org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.simdvec.ESVectorUtil; @@ -45,9 +45,9 @@ public ES816BinaryFlatVectorsScorer(FlatVectorsScorer nonQuantizedDelegate) { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues + KnnVectorValues vectorValues ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues) { + if (vectorValues instanceof BinarizedByteVectorValues) { throw new UnsupportedOperationException( "getRandomVectorScorerSupplier(VectorSimilarityFunction,RandomAccessVectorValues) not implemented for binarized format" ); @@ -58,10 +58,10 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, float[] target ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues binarizedVectors) { + if (vectorValues instanceof BinarizedByteVectorValues binarizedVectors) { BinaryQuantizer quantizer = binarizedVectors.getQuantizer(); float[] centroid = binarizedVectors.getCentroid(); // FIXME: precompute this once? 
@@ -82,7 +82,7 @@ public RandomVectorScorer getRandomVectorScorer( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, byte[] target ) throws IOException { return nonQuantizedDelegate.getRandomVectorScorer(similarityFunction, vectorValues, target); @@ -91,7 +91,7 @@ public RandomVectorScorer getRandomVectorScorer( RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues scoringVectors, - RandomAccessBinarizedByteVectorValues targetVectors + BinarizedByteVectorValues targetVectors ) { return new BinarizedRandomVectorScorerSupplier(scoringVectors, targetVectors, similarityFunction); } @@ -104,12 +104,12 @@ public String toString() { /** Vector scorer supplier over binarized vector values */ static class BinarizedRandomVectorScorerSupplier implements RandomVectorScorerSupplier { private final ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; BinarizedRandomVectorScorerSupplier( ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { this.queryVectors = queryVectors; @@ -149,7 +149,7 @@ public record BinaryQueryVector(byte[] vector, BinaryQuantizer.QueryFactors fact /** Vector scorer over binarized vector values */ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final BinaryQueryVector queryVector; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final 
BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; private final float sqrtDimensions; @@ -157,7 +157,7 @@ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.Abstr public BinarizedRandomVectorScorer( BinaryQueryVector queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { super(targetVectors); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java index b0378fee6793d..21c4a5c449387 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java @@ -36,6 +36,7 @@ import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.Bits; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; @@ -78,7 +79,7 @@ public ES816BinaryQuantizedVectorsReader( ES816BinaryQuantizedVectorsFormat.META_EXTENSION ); boolean success = false; - try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName, state.context)) { + try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) { Throwable priorE = null; try { versionMeta = CodecUtil.checkIndexHeader( @@ -102,7 +103,7 @@ public ES816BinaryQuantizedVectorsReader( ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME, // Quantized vectors are accessed randomly from their node ID stored in the HNSW // graph. 
- state.context.withRandomAccess() + state.context.withReadAdvice(ReadAdvice.RANDOM) ); success = true; } finally { @@ -357,9 +358,9 @@ static FieldEntry create(IndexInput input, VectorEncoding vectorEncoding, Vector /** Binarized vector values holding row and quantized vector values */ protected static final class BinarizedVectorValues extends FloatVectorValues { private final FloatVectorValues rawVectorValues; - private final OffHeapBinarizedVectorValues quantizedVectorValues; + private final BinarizedByteVectorValues quantizedVectorValues; - BinarizedVectorValues(FloatVectorValues rawVectorValues, OffHeapBinarizedVectorValues quantizedVectorValues) { + BinarizedVectorValues(FloatVectorValues rawVectorValues, BinarizedByteVectorValues quantizedVectorValues) { this.rawVectorValues = rawVectorValues; this.quantizedVectorValues = quantizedVectorValues; } @@ -375,29 +376,28 @@ public int size() { } @Override - public float[] vectorValue() throws IOException { - return rawVectorValues.vectorValue(); + public float[] vectorValue(int ord) throws IOException { + return rawVectorValues.vectorValue(ord); } @Override - public int docID() { - return rawVectorValues.docID(); + public BinarizedVectorValues copy() throws IOException { + return new BinarizedVectorValues(rawVectorValues.copy(), quantizedVectorValues.copy()); } @Override - public int nextDoc() throws IOException { - int rawDocId = rawVectorValues.nextDoc(); - int quantizedDocId = quantizedVectorValues.nextDoc(); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public Bits getAcceptOrds(Bits acceptDocs) { + return rawVectorValues.getAcceptOrds(acceptDocs); } @Override - public int advance(int target) throws IOException { - int rawDocId = rawVectorValues.advance(target); - int quantizedDocId = quantizedVectorValues.advance(target); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public int ordToDoc(int ord) { + return rawVectorValues.ordToDoc(ord); + } + + @Override + public 
DocIndexIterator iterator() { + return rawVectorValues.iterator(); } @Override @@ -405,7 +405,7 @@ public VectorScorer scorer(float[] query) throws IOException { return quantizedVectorValues.scorer(query); } - protected OffHeapBinarizedVectorValues getQuantizedVectorValues() throws IOException { + protected BinarizedByteVectorValues getQuantizedVectorValues() throws IOException { return quantizedVectorValues; } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java index 92837a8ffce45..a7774b850b64c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; @@ -44,7 +45,6 @@ import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.VectorUtil; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.core.SuppressForbidden; @@ -354,10 +354,11 @@ static DocsWithFieldSet writeBinarizedVectorAndQueryData( int queryCorrectionCount = binaryQuantizer.getSimilarity() != EUCLIDEAN ? 
5 : 3; final ByteBuffer queryCorrectionsBuffer = ByteBuffer.allocate(Float.BYTES * queryCorrectionCount + Short.BYTES) .order(ByteOrder.LITTLE_ENDIAN); - for (int docV = floatVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = floatVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = floatVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write index vector BinaryQuantizer.QueryAndIndexResults r = binaryQuantizer.quantizeQueryAndIndex( - floatVectorValues.vectorValue(), + floatVectorValues.vectorValue(iterator.index()), toIndex, toQuery, centroid @@ -393,11 +394,12 @@ static DocsWithFieldSet writeBinarizedVectorAndQueryData( static DocsWithFieldSet writeBinarizedVectorData(IndexOutput output, BinarizedByteVectorValues binarizedByteVectorValues) throws IOException { DocsWithFieldSet docsWithField = new DocsWithFieldSet(); - for (int docV = binarizedByteVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = binarizedByteVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = binarizedByteVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write vector - byte[] binaryValue = binarizedByteVectorValues.vectorValue(); + byte[] binaryValue = binarizedByteVectorValues.vectorValue(iterator.index()); output.writeBytes(binaryValue, binaryValue.length); - float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(); + float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(iterator.index()); for (int i = 0; i < corrections.length; i++) { output.writeInt(Float.floatToIntBits(corrections[i])); } @@ -598,8 +600,9 @@ static int calculateCentroid(MergeState mergeState, FieldInfo fieldInfo, float[] if (vectorValues == null) { continue; } - for (int doc = vectorValues.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = vectorValues.nextDoc()) { - float[] vector = vectorValues.vectorValue(); + 
KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { + float[] vector = vectorValues.vectorValue(iterator.index()); // TODO Panama sum for (int j = 0; j < vector.length; j++) { centroid[j] += vector[j]; @@ -827,23 +830,31 @@ static class BinarizedFloatVectorValues extends BinarizedByteVectorValues { private final float[] centroid; private final FloatVectorValues values; private final BinaryQuantizer quantizer; - private int lastDoc; + private int lastOrd = -1; BinarizedFloatVectorValues(FloatVectorValues delegate, BinaryQuantizer quantizer, float[] centroid) { this.values = delegate; this.quantizer = quantizer; this.binarized = new byte[BQVectorUtils.discretize(delegate.dimension(), 64) / 8]; this.centroid = centroid; - lastDoc = -1; } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int ord) { + if (ord != lastOrd) { + throw new IllegalStateException( + "attempt to retrieve corrective terms for different ord " + ord + " than the quantization was done for: " + lastOrd + ); + } return corrections; } @Override - public byte[] vectorValue() throws IOException { + public byte[] vectorValue(int ord) throws IOException { + if (ord != lastOrd) { + binarize(ord); + lastOrd = ord; + } return binarized; } @@ -853,33 +864,43 @@ public int dimension() { } @Override - public int size() { - return values.size(); + public float getCentroidDistance(int vectorOrd) throws IOException { + throw new UnsupportedOperationException(); } @Override - public int docID() { - return values.docID(); + public float getVectorMagnitude(int vectorOrd) throws IOException { + throw new UnsupportedOperationException(); } @Override - public int nextDoc() throws IOException { - int doc = values.nextDoc(); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; + public float getOOQ(int targetOrd) throws IOException { + throw 
new UnsupportedOperationException(); } @Override - public int advance(int target) throws IOException { - int doc = values.advance(target); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; + public float getNormOC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public float getODotC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public BinaryQuantizer getQuantizer() { + throw new UnsupportedOperationException(); + } + + @Override + public float[] getCentroid() throws IOException { + return centroid; + } + + @Override + public int size() { + return values.size(); } @Override @@ -887,22 +908,32 @@ public VectorScorer scorer(float[] target) throws IOException { throw new UnsupportedOperationException(); } - private void binarize() throws IOException { - if (lastDoc == docID()) return; - corrections = quantizer.quantizeForIndex(values.vectorValue(), binarized, centroid); + @Override + public BinarizedByteVectorValues copy() throws IOException { + return new BinarizedFloatVectorValues(values.copy(), quantizer, centroid); + } + + private void binarize(int ord) throws IOException { + corrections = quantizer.quantizeForIndex(values.vectorValue(ord), binarized, centroid); + } + + @Override + public DocIndexIterator iterator() { + return values.iterator(); + } + + @Override + public int ordToDoc(int ord) { + return values.ordToDoc(ord); } } static class BinarizedCloseableRandomVectorScorerSupplier implements CloseableRandomVectorScorerSupplier { private final RandomVectorScorerSupplier supplier; - private final RandomAccessVectorValues vectorValues; + private final KnnVectorValues vectorValues; private final Closeable onClose; - BinarizedCloseableRandomVectorScorerSupplier( - RandomVectorScorerSupplier supplier, - RandomAccessVectorValues vectorValues, - Closeable onClose - ) { + 
BinarizedCloseableRandomVectorScorerSupplier(RandomVectorScorerSupplier supplier, KnnVectorValues vectorValues, Closeable onClose) { this.supplier = supplier; this.onClose = onClose; this.vectorValues = vectorValues; @@ -932,7 +963,6 @@ public int totalVectorCount() { static final class NormalizedFloatVectorValues extends FloatVectorValues { private final FloatVectorValues values; private final float[] normalizedVector; - int curDoc = -1; NormalizedFloatVectorValues(FloatVectorValues values) { this.values = values; @@ -950,38 +980,25 @@ public int size() { } @Override - public float[] vectorValue() { - return normalizedVector; + public int ordToDoc(int ord) { + return values.ordToDoc(ord); } @Override - public VectorScorer scorer(float[] query) { - throw new UnsupportedOperationException(); - } - - @Override - public int docID() { - return values.docID(); + public float[] vectorValue(int ord) throws IOException { + System.arraycopy(values.vectorValue(ord), 0, normalizedVector, 0, normalizedVector.length); + VectorUtil.l2normalize(normalizedVector); + return normalizedVector; } @Override - public int nextDoc() throws IOException { - curDoc = values.nextDoc(); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; + public DocIndexIterator iterator() { + return values.iterator(); } @Override - public int advance(int target) throws IOException { - curDoc = values.advance(target); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; + public NormalizedFloatVectorValues copy() throws IOException { + return new NormalizedFloatVectorValues(values.copy()); } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java 
b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java index 628480e273b34..e7d818bb752d6 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java @@ -37,7 +37,7 @@ import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; /** Binarized vector values loaded from off-heap */ -public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues implements RandomAccessBinarizedByteVectorValues { +public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues { protected final int dimension; protected final int size; @@ -131,7 +131,12 @@ public float getCentroidDP() { } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); return correctiveValues; } @@ -195,11 +200,6 @@ public float[] getCentroid() { return centroid; } - @Override - public IndexInput getSlice() { - return slice; - } - @Override public int getVectorByteLength() { return numBytes; @@ -252,8 +252,6 @@ public static OffHeapBinarizedVectorValues load( /** Dense off-heap binarized vector values */ public static class DenseOffHeapVectorValues extends OffHeapBinarizedVectorValues { - private int doc = -1; - public DenseOffHeapVectorValues( int dimension, int size, @@ -267,30 +265,6 @@ public DenseOffHeapVectorValues( super(dimension, size, centroid, centroidDp, binaryQuantizer, similarityFunction, vectorsScorer, slice); } - @Override - public byte[] vectorValue() throws IOException { - return vectorValue(doc); - } - - @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 
1); - } - - @Override - public int advance(int target) { - assert docID() < target; - if (target >= size) { - return doc = NO_MORE_DOCS; - } - return doc = target; - } - @Override public DenseOffHeapVectorValues copy() throws IOException { return new DenseOffHeapVectorValues( @@ -313,19 +287,25 @@ public Bits getAcceptOrds(Bits acceptDocs) { @Override public VectorScorer scorer(float[] target) throws IOException { DenseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.doc); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } + + @Override + public DocIndexIterator iterator() { + return createDenseIterator(); + } } /** Sparse off-heap binarized vector values */ @@ -355,27 +335,6 @@ private static class SparseOffHeapVectorValues extends OffHeapBinarizedVectorVal this.disi = configuration.getIndexedDISI(dataIn); } - @Override - public byte[] vectorValue() throws IOException { - return vectorValue(disi.index()); - } - - @Override - public int docID() { - return disi.docID(); - } - - @Override - public int nextDoc() throws IOException { - return disi.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - assert docID() < target; - return disi.advance(target); - } - @Override public SparseOffHeapVectorValues copy() throws IOException { return new SparseOffHeapVectorValues( @@ -415,19 +374,25 @@ public int length() { }; } + @Override + public DocIndexIterator iterator() { + return IndexedDISI.asDocIndexIterator(disi); + } + @Override public VectorScorer scorer(float[] target) throws IOException { SparseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = 
vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.disi.index()); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } @@ -441,23 +406,8 @@ private static class EmptyOffHeapVectorValues extends OffHeapBinarizedVectorValu } @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 1); - } - - @Override - public int advance(int target) { - return doc = NO_MORE_DOCS; - } - - @Override - public byte[] vectorValue() { - throw new UnsupportedOperationException(); + public DocIndexIterator iterator() { + return createDenseIterator(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java deleted file mode 100644 index 5163baf617c29..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * @notice - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * Modifications copyright (C) 2024 Elasticsearch B.V. - */ -package org.elasticsearch.index.codec.vectors; - -import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; - -import java.io.IOException; - -import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; - -/** - * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 - */ -public interface RandomAccessBinarizedByteVectorValues extends RandomAccessVectorValues.Bytes { - /** Returns the centroid distance for the vector */ - float getCentroidDistance(int vectorOrd) throws IOException; - - /** Returns the vector magnitude for the vector */ - float getVectorMagnitude(int vectorOrd) throws IOException; - - /** Returns OOQ corrective factor for the given vector ordinal */ - float getOOQ(int targetOrd) throws IOException; - - /** - * Returns the norm of the target vector w the centroid corrective factor for the given vector - * ordinal - */ - float getNormOC(int targetOrd) throws IOException; - - /** - * Returns the target vector dot product the centroid corrective factor for the given vector - * ordinal - */ - float getODotC(int targetOrd) throws IOException; - - /** - * @return the quantizer used to quantize the vectors - */ - BinaryQuantizer getQuantizer(); - - default int discretizedDimensions() { - return BQVectorUtils.discretize(dimension(), 64); - } - - default float sqrtDimensions() { - return (float) constSqrt(dimension()); - } - - default float maxX1() { - return (float) (1.9 / constSqrt(discretizedDimensions() - 1.0)); - } - - /** - * @return coarse grained centroids for the vectors - */ - float[] getCentroid() throws IOException; - - @Override - RandomAccessBinarizedByteVectorValues copy() throws IOException; - - default float getCentroidDP() throws IOException { - // this only gets executed on-merge - 
float[] centroid = getCentroid(); - return VectorUtil.dotProduct(centroid, centroid); - } -} diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index 05cc6d148be5e..e44b344d3b283 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -119,7 +119,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.parallelArray = new ParallelArray(this.searchBatchSize); this.indexVersionCreated = indexVersionCreated; final TopDocs topDocs = searchOperations(null, accessStats); - this.totalHits = Math.toIntExact(topDocs.totalHits.value); + this.totalHits = Math.toIntExact(topDocs.totalHits.value()); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); } @@ -341,7 +341,7 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { assert storedFieldsReaderOrd == leaf.ord : storedFieldsReaderOrd + " != " + leaf.ord; storedFieldsReader.document(segmentDocID, fields); } else { - leaf.reader().document(segmentDocID, fields); + leaf.reader().storedFields().document(segmentDocID, fields); } final Translog.Operation op; diff --git a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java index 18b5ba69ca320..3e99818d1827b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -13,6 +13,7 @@ import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CodecReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; 
import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterNumericDocValues; @@ -188,6 +189,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { return in.getSortedSet(field); } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return in.getSkipper(field); + } + @Override public void checkIntegrity() throws IOException { in.checkIntegrity(); diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index c7acd730fadb5..0f772b49bf92b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -13,10 +13,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.ImpactsEnum; import org.apache.lucene.index.IndexCommit; @@ -152,6 +153,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -171,6 +173,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -190,6 +193,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.DOCS, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -346,6 +350,11 
@@ public NumericDocValues getNormValues(String field) throws IOException { return getDelegate().getNormValues(field); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return getDelegate().getDocValuesSkipper(field); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { return getDelegate().getFloatVectorValues(field); @@ -389,11 +398,6 @@ public LeafMetaData getMetaData() { return getDelegate().getMetaData(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - return getDelegate().getTermVectors(docID); - } - @Override public TermVectors termVectors() throws IOException { return getDelegate().termVectors(); @@ -429,11 +433,6 @@ public int maxDoc() { return 1; } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - private void readStoredFieldsDirectly(StoredFieldVisitor visitor) throws IOException { if (visitor.needsField(FAKE_SOURCE_FIELD) == StoredFieldVisitor.Status.YES) { BytesReference sourceBytes = operation.source(); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java index 84e85f3ddf2b4..d4e34181b876f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java @@ -52,12 +52,7 @@ public boolean advanceExact(int target) throws IOException { @Override public long nextOrd() throws IOException { - long segmentOrd = values.nextOrd(); - if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return getGlobalOrd(segmentOrd); - } + return getGlobalOrd(values.nextOrd()); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index 0439383ccbd05..0f72e491d8110 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -40,13 +40,13 @@ public static boolean significantlySmallerThanSinglePackedOrdinals( float acceptableOverheadRatio ) { int bitsPerOrd = PackedInts.bitsRequired(numOrds); - bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue; + bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue(); // Compute the worst-case number of bits per value for offsets in the worst case, eg. if no docs have a value at the // beginning of the block and all docs have one at the end of the block final float avgValuesPerDoc = (float) numDocsWithValue / maxDoc; final int maxDelta = (int) Math.ceil(OFFSETS_PAGE_SIZE * (1 - avgValuesPerDoc) * avgValuesPerDoc); int bitsPerOffset = PackedInts.bitsRequired(maxDelta) + 1; // +1 because of the sign - bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue; + bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue(); final long expectedMultiSizeInBytes = (long) numDocsWithValue * bitsPerOrd + (long) maxDoc * bitsPerOffset; final long expectedSingleSizeInBytes = (long) maxDoc * bitsPerOrd; @@ -153,6 +153,7 @@ private static class MultiDocs extends AbstractSortedSetDocValues { private long currentOffset; private long currentEndOffset; + private int count; MultiDocs(MultiOrdinals ordinals, ValuesHolder values) { this.valueCount = ordinals.valueCount; @@ -170,21 +171,19 @@ public long getValueCount() { public boolean advanceExact(int docId) { 
currentOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; currentEndOffset = endOffsets.get(docId); + count = Math.toIntExact(currentEndOffset - currentOffset); return currentOffset != currentEndOffset; } @Override public long nextOrd() { - if (currentOffset == currentEndOffset) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return ords.get(currentOffset++); - } + assert currentOffset != currentEndOffset; + return ords.get(currentOffset++); } @Override public int docValueCount() { - return Math.toIntExact(currentEndOffset - currentOffset); + return count; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 57572dea8ac0f..d05f0e477db09 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.Field; import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -687,7 +688,7 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo long pivotLong = resolution.convert(pivotTime); // As we already apply boost in AbstractQueryBuilder::toQuery, we always passing a boost of 1.0 to distanceFeatureQuery if (isIndexed()) { - return LongPoint.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); + return LongField.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); } else { return new LongScriptFieldDistanceFeatureQuery( new Script(""), @@ -959,7 +960,7 @@ private void indexValue(DocumentParserContext context, long timestamp) { } if (indexed && hasDocValues) { - context.doc().add(new LongField(fieldType().name(), timestamp)); + context.doc().add(new 
LongField(fieldType().name(), timestamp, Field.Store.NO)); } else if (hasDocValues) { context.doc().add(new SortedNumericDocValuesField(fieldType().name(), timestamp)); } else if (indexed) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index 494005ce12cb1..d37f6c51d288d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -11,10 +11,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -147,11 +148,6 @@ public FieldInfos getFieldInfos() { return new FieldInfos(new FieldInfo[0]); } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { return new StoredFields() { @@ -203,6 +199,11 @@ public NumericDocValues getNormValues(String field) throws IOException { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String s) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -233,11 +234,6 @@ public LeafMetaData getMetaData() { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int 
docID) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public int numDocs() { throw new UnsupportedOperationException(); @@ -284,6 +280,7 @@ private static FieldInfo fieldInfo(String name) { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -484,9 +481,7 @@ private static SortedSetDocValues sortedSetDocValues(List values) { @Override public long nextOrd() { i++; - if (i >= values.size()) { - return NO_MORE_ORDS; - } + assert i < values.size(); return i; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index b9d89462c3467..8e418f45ddb3a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -22,6 +22,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map; /** @@ -97,13 +98,13 @@ public boolean isSearchable() { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(v -> { + List bytesRefs = values.stream().map(v -> { Object idObject = v; if (idObject instanceof BytesRef) { idObject = ((BytesRef) idObject).utf8ToString(); } return Uid.encodeId(idObject.toString()); - }).toArray(BytesRef[]::new); + }).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java index 6900dcd773917..8114167c02486 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java @@ -13,7 +13,6 @@ import 
org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -76,8 +75,8 @@ static CompiledAutomaton buildIpPrefixAutomaton(String ipPrefix) { } else { result = Automata.makeAnyBinary(); } - result = MinimizationOperations.minimize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CompiledAutomaton(result, null, false, 0, true); + result = Operations.determinize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return new CompiledAutomaton(result, false, false, true); } private static Automaton getIpv6Automaton(String ipPrefix) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 1ff9fd2f699c9..802680e7f373e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -32,7 +32,6 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; @@ -491,7 +490,7 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (isIndexed()) { return super.termsQuery(values, context); } else { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + Collection bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), 
bytesRefs); } } @@ -597,7 +596,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi ? AutomatonQueries.caseInsensitivePrefix(prefix) : Operations.concatenate(Automata.makeString(prefix), Automata.makeAnyString()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a, true, true); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java index f1924fd04f3fe..c6f1b490a2be2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; @@ -70,7 +69,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + var bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 2e815554dc829..3608e8ab261c1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.DoublePoint; 
+import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatField; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntField; @@ -589,7 +590,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final float f = value.floatValue(); if (indexed && docValued) { - document.add(new FloatField(name, f)); + document.add(new FloatField(name, f, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(f))); } else if (indexed) { @@ -743,7 +744,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final double d = value.doubleValue(); if (indexed && docValued) { - document.add(new DoubleField(name, d)); + document.add(new DoubleField(name, d, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(d))); } else if (indexed) { @@ -1179,7 +1180,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final int i = value.intValue(); if (indexed && docValued) { - document.add(new IntField(name, i)); + document.add(new IntField(name, i, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, i)); } else if (indexed) { @@ -1330,7 +1331,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final long l = value.longValue(); if (indexed && docValued) { - document.add(new LongField(name, l)); + document.add(new LongField(name, l, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, l)); } else if (indexed) { diff 
--git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 9ea16933f7ab5..ceb96b87a0983 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -101,9 +101,7 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool failIfNotIndexed(); Term prefix = new Term(name(), indexedValueForSearch(value)); if (caseInsensitive) { - return method == null - ? new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? new CaseInsensitivePrefixQuery(prefix, false) : new CaseInsensitivePrefixQuery(prefix, false, method); } return method == null ? new PrefixQuery(prefix) : new PrefixQuery(prefix, method); } @@ -170,9 +168,7 @@ protected Query wildcardQuery( term = new Term(name(), indexedValueForSearch(value)); } if (caseInsensitive) { - return method == null - ? new CaseInsensitiveWildcardQuery(term) - : new CaseInsensitiveWildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? new CaseInsensitiveWildcardQuery(term) : new CaseInsensitiveWildcardQuery(term, false, method); } return method == null ? 
new WildcardQuery(term) : new WildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, method); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java index 674a016264c3a..e2ff9cc7ea632 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Collection; +import java.util.List; import java.util.Map; /** Base {@link MappedFieldType} implementation for a field that is indexed @@ -69,7 +70,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + List bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 642539fbbc2f8..3f77edc819602 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -598,8 +598,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? 
new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index ac1de94ea7a73..93a2157b2338a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -28,10 +28,10 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -394,7 +394,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi a = Operations.concatenate(a, Automata.makeAnyString()); } assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a); if (searchAfter != null) { @@ -483,6 +482,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new 
UnsupportedOperationException(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index b94ea67c8de8d..b29f093e3a217 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -205,12 +205,8 @@ public long nextOrd() throws IOException { } long ord = delegate.nextOrd(); - if (ord != NO_MORE_ORDS && ord <= maxOrd) { - assert ord >= minOrd; - return mapOrd(ord); - } else { - return NO_MORE_ORDS; - } + assert ord <= maxOrd; + return mapOrd(ord); } @Override @@ -223,9 +219,9 @@ public boolean advanceExact(int target) throws IOException { if (delegate.advanceExact(target)) { int count = 0; - while (true) { + for (int i = 0; i < delegate.docValueCount(); i++) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } if (ord >= minOrd) { @@ -246,7 +242,7 @@ public boolean advanceExact(int target) throws IOException { while (true) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java index e8da3b72ae7c7..04069333deb13 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java @@ -45,24 +45,13 @@ public int size() { } @Override - public float[] vectorValue() throws IOException { - // Lazy load vectors as we may iterate but not actually require the vector - return vectorValue(in.docID()); + public 
DocIndexIterator iterator() { + return in.iterator(); } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - return in.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - return in.advance(target); + public FloatVectorValues copy() throws IOException { + return in.copy(); } @Override @@ -74,22 +63,24 @@ public float magnitude() { return magnitude; } - private float[] vectorValue(int docId) throws IOException { + @Override + public float[] vectorValue(int ord) throws IOException { + int docId = ordToDoc(ord); if (docId != this.docId) { this.docId = docId; hasMagnitude = decodedMagnitude(docId); // We should only copy and transform if we have a stored a non-unit length magnitude if (hasMagnitude) { - System.arraycopy(in.vectorValue(), 0, vector, 0, dimension()); + System.arraycopy(in.vectorValue(ord), 0, vector, 0, dimension()); for (int i = 0; i < vector.length; i++) { vector[i] *= magnitude; } return vector; } else { - return in.vectorValue(); + return in.vectorValue(ord); } } else { - return hasMagnitude ? vector : in.vectorValue(); + return hasMagnitude ? 
vector : in.vectorValue(ord); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index a023837a0efb7..809532c0e8f5a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; @@ -2309,6 +2310,7 @@ private class IndexedSyntheticFieldLoader extends SourceLoader.DocValuesBasedSyn private ByteVectorValues byteVectorValues; private boolean hasValue; private boolean hasMagnitude; + private int ord; private final IndexVersion indexCreatedVersion; private final VectorSimilarity vectorSimilarity; @@ -2326,16 +2328,20 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) { magnitudeReader = leafReader.getNumericDocValues(fullPath() + COSINE_MAGNITUDE_FIELD_SUFFIX); } + KnnVectorValues.DocIndexIterator iterator = values.iterator(); return docId -> { - hasValue = docId == values.advance(docId); + hasValue = docId == iterator.advance(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); + ord = iterator.index(); return hasValue; }; } byteVectorValues = leafReader.getByteVectorValues(fullPath()); if (byteVectorValues != null) { + KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); return docId -> { - hasValue = docId == byteVectorValues.advance(docId); + 
hasValue = docId == iterator.advance(docId); + ord = iterator.index(); return hasValue; }; } @@ -2358,7 +2364,7 @@ public void write(XContentBuilder b) throws IOException { } b.startArray(leafName()); if (values != null) { - for (float v : values.vectorValue()) { + for (float v : values.vectorValue(ord)) { if (hasMagnitude) { b.value(v * magnitude); } else { @@ -2366,7 +2372,7 @@ public void write(XContentBuilder b) throws IOException { } } } else if (byteVectorValues != null) { - byte[] vectorValue = byteVectorValues.vectorValue(); + byte[] vectorValue = byteVectorValues.vectorValue(ord); for (byte value : vectorValue) { b.value(value); } diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 16aada4066f71..1560004b13785 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -412,8 +412,8 @@ public Query createPhraseQuery(String field, String queryText, int phraseSlop) { protected Query newSynonymQuery(String field, TermAndBoost[] terms) { CombinedFieldQuery.Builder query = new CombinedFieldQuery.Builder(); for (TermAndBoost termAndBoost : terms) { - assert termAndBoost.boost == BoostAttribute.DEFAULT_BOOST; - BytesRef bytes = termAndBoost.term; + assert termAndBoost.boost() == BoostAttribute.DEFAULT_BOOST; + BytesRef bytes = termAndBoost.term(); query.addTerm(bytes); } for (FieldAndBoost fieldAndBoost : fields) { diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java index f21edaeb94f22..b2b37ad834178 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -20,7 +20,7 @@ import 
org.apache.lucene.queries.intervals.IntervalMatchesIterator; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; @@ -189,7 +189,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -204,7 +204,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept TokenStream ts = it.next(); IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, true); if (paths.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } paths.add(phrase); } diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 55642ccf0275a..626875c75a5fe 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; 
import org.apache.lucene.search.Weight; @@ -443,12 +443,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 6072a81691ffa..30921d22a8d82 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -10,9 +10,12 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import java.util.Locale; +import static org.apache.lucene.util.automaton.RegExp.DEPRECATED_COMPLEMENT; + /** * Regular expression syntax flags. Each flag represents optional syntax support in the regular expression: *

    @@ -37,8 +40,11 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} + * We use the deprecated support in Lucene 10. Will be removed in Lucene 11 + * https://github.com/elastic/elasticsearch/issues/113465 */ - COMPLEMENT(RegExp.COMPLEMENT), + @UpdateForV10(owner = UpdateForV10.Owner.SEARCH_FOUNDATIONS) + COMPLEMENT(DEPRECATED_COMPLEMENT), /** * Enables empty language expression: {@code #} @@ -63,7 +69,7 @@ public enum RegexpFlag { /** * Enables all available option flags */ - ALL(RegExp.ALL); + ALL(RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); final int value; diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index dc439fab58ffc..461dc66322434 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -280,7 +280,9 @@ protected Query doToQuery(SearchExecutionContext context) throws QueryShardExcep int matchFlagsValue = caseInsensitive ? 
RegExp.ASCII_CASE_INSENSITIVE : 0; Query query = null; // For BWC we mask irrelevant bits (RegExp changed ALL from 0xffff to 0xff) - int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp.ALL; + // We need to preserve the DEPRECATED_COMPLEMENT for now though + int deprecatedComplementFlag = syntaxFlagsValue & RegExp.DEPRECATED_COMPLEMENT; + int sanitisedSyntaxFlag = syntaxFlagsValue & (RegExp.ALL | deprecatedComplementFlag); MappedFieldType fieldType = context.getFieldType(fieldName); if (fieldType != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index c96771978bd42..8d3fd1d92e1e7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchException; @@ -184,7 +185,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final FilterScript leafScript = filterScript.newInstance(new DocValuesDocReader(lookup, context)); TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -201,7 +202,8 @@ public float matchCost() { return 1000f; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index 81afdf0ebe5e0..a6116ccf2c495 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.sandbox.search.CoveringQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; @@ -273,8 +272,8 @@ protected Query doToQuery(SearchExecutionContext context) { return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); } // Fail before we attempt to create the term queries: - if (values.size() > BooleanQuery.getMaxClauseCount()) { - throw new BooleanQuery.TooManyClauses(); + if (values.size() > IndexSearcher.getMaxClauseCount()) { + throw new IndexSearcher.TooManyClauses(); } List queries = createTermQueries(context); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 528f0bd6dae08..1327721a88427 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -149,7 +149,7 @@ private static Response wrapSearchResponse(SearchResponse response) { } hits = unmodifiableList(hits); } - long total = response.getHits().getTotalHits().value; + long total = response.getHits().getTotalHits().value(); return new Response(response.isTimedOut(), failures, total, hits, response.getScrollId()); } diff --git 
a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java index 505c20f642093..5f135c674ba1a 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostAttribute; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.QueryBuilder; @@ -690,7 +691,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -708,7 +709,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in SpanQuery q = createSpanQuery(ts, field, usePrefix); if (q != null) { if (queries.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queries.add(q); } @@ -722,14 +723,14 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in Term[] terms = graph.getTerms(field, start); assert terms.length > 0; if (terms.length >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queryPos = newSpanQuery(terms, usePrefix); } if (queryPos != null) { if (clauses.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } clauses.add(queryPos); } diff --git 
a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java index 52122ed86ef69..446d78078e642 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java @@ -200,7 +200,7 @@ protected Query createBooleanPrefixQuery(String field, String queryText, Boolean protected Query newSynonymQuery(String field, TermAndBoost[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { - values[i] = terms[i].term; + values[i] = terms[i].term(); } return blendTerms(context, values, tieBreaker, lenient, blendedFields); } diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index db0077284bbd3..96e8ac35c8e32 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -24,8 +24,6 @@ import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; -import java.io.IOException; -import java.io.UncheckedIOException; import java.util.function.Predicate; /** Utility class to filter parent and children clauses when building nested @@ -55,15 +53,10 @@ public boolean mightMatchNestedDocs(Query query) { // cover a high majority of use-cases return mightMatchNestedDocs(((TermQuery) query).getTerm().field()); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNestedDocs(tis.getField()); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + 
return mightMatchNestedDocs(tis.getField()); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNestedDocs(((PointRangeQuery) query).getField()); @@ -75,13 +68,13 @@ public boolean mightMatchNestedDocs(Query query) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(this::mightMatchNestedDocs); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(this::mightMatchNestedDocs); } } else if (query instanceof ESToParentBlockJoinQuery) { @@ -122,15 +115,10 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { } else if (query instanceof TermQuery) { return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNonNestedDocs(tis.getField(), nestedPath); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + return mightMatchNonNestedDocs(tis.getField(), nestedPath); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); @@ -142,13 +130,13 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + 
.map(BooleanClause::query) .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } } else { @@ -183,5 +171,4 @@ boolean mightMatchNonNestedDocs(String field, String nestedPath) { } return true; } - } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 76dba60689422..d237a03335337 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -760,7 +760,14 @@ private Query getRegexpQuerySingle(String field, String termStr) throws ParseExc setAnalyzer(forceAnalyzer); return super.getRegexpQuery(field, termStr); } - return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(), getMultiTermRewriteMethod(), context); + return currentFieldType.regexpQuery( + termStr, + RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT, + 0, + getDeterminizeWorkLimit(), + getMultiTermRewriteMethod(), + context + ); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java index 562bf1e75dc1f..97d1b3342ca2b 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java @@ -33,7 +33,7 @@ public static Tuple getClea final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); @@ -64,7 +64,7 @@ public static void execute(Terminal terminal, Directory 
indexDirectory, Lock wri final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index f1291ac6faa51..94a29258f3202 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -73,7 +74,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { LeafReader leafReader = context.reader(); FixedBitSet bitSet = new FixedBitSet(leafReader.maxDoc()); Terms terms = leafReader.terms(RoutingFieldMapper.NAME); @@ -82,87 +83,102 @@ public Scorer scorer(LeafReaderContext context) throws IOException { int targetShardId = indexRouting.getShard(Uid.decodeId(ref.bytes, ref.offset, ref.length), null); return shardId == targetShardId; }; - if (terms == null) { - // this is the common case - no partitioning and no _routing values - // in this case we also don't do anything special with regards to nested docs since we basically delete - // by ID and parent and nested all have the same id. 
- assert indexMetadata.isRoutingPartitionedIndex() == false; - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); - } else { - final BitSet parentBitSet; - if (nestedParentBitSetProducer == null) { - parentBitSet = null; - } else { - parentBitSet = nestedParentBitSetProducer.getBitSet(context); - if (parentBitSet == null) { - return null; // no matches - } - } - if (indexMetadata.isRoutingPartitionedIndex()) { - // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing - // this index is routing partitioned. - Visitor visitor = new Visitor(leafReader); - TwoPhaseIterator twoPhaseIterator = parentBitSet == null - ? new RoutingPartitionedDocIdSetIterator(visitor) - : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); - return new ConstantScoreScorer(this, score(), scoreMode, twoPhaseIterator); - } else { - // here we potentially guard the docID consumers with our parent bitset if we have one. - // this ensures that we are only marking root documents in the nested case and if necessary - // we do a second pass to mark the corresponding children in markChildDocs - Function maybeWrapConsumer = consumer -> { - if (parentBitSet != null) { - return docId -> { - if (parentBitSet.get(docId)) { - consumer.accept(docId); + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + if (terms == null) { + // this is the common case - no partitioning and no _routing values + // in this case we also don't do anything special with regards to nested docs since we basically delete + // by ID and parent and nested all have the same id. 
+ assert indexMetadata.isRoutingPartitionedIndex() == false; + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); + } else { + final BitSet parentBitSet; + if (nestedParentBitSetProducer == null) { + parentBitSet = null; + } else { + parentBitSet = nestedParentBitSetProducer.getBitSet(context); + if (parentBitSet == null) { + return null; // no matches + } + } + if (indexMetadata.isRoutingPartitionedIndex()) { + // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing + // this index is routing partitioned. + Visitor visitor = new Visitor(leafReader); + TwoPhaseIterator twoPhaseIterator = parentBitSet == null + ? new RoutingPartitionedDocIdSetIterator(visitor) + : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); + return new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator); + } else { + // here we potentially guard the docID consumers with our parent bitset if we have one. + // this ensures that we are only marking root documents in the nested case and if necessary + // we do a second pass to mark the corresponding children in markChildDocs + Function maybeWrapConsumer = consumer -> { + if (parentBitSet != null) { + return docId -> { + if (parentBitSet.get(docId)) { + consumer.accept(docId); + } + }; } + return consumer; }; - } - return consumer; - }; - // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to delete - findSplitDocs(RoutingFieldMapper.NAME, ref -> { - int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); - return shardId == targetShardId; - }, leafReader, maybeWrapConsumer.apply(bitSet::set)); - - // TODO have the IndexRouting build the query and pass routingRequired in - boolean routingRequired = indexMetadata.mapping() == null ? 
false : indexMetadata.mapping().routingRequired(); - // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the ones - // with a routing value from the next iteration and delete / select based on the ID. - if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { - /* - * This is a special case where some docs don't have routing values. - * It's annoying, but it's allowed to build an index where some documents - * hve routing and others don't. - * - * Luckily, if the routing field is required in the mapping then we can - * safely assume that all documents which are don't have a routing are - * nested documents. And we pick those up later based on the assignment - * of the document that contains them. - */ - FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); - findSplitDocs( - RoutingFieldMapper.NAME, - Predicates.never(), - leafReader, - maybeWrapConsumer.apply(hasRoutingValue::set) - ); - IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { - if (hasRoutingValue.get(docId) == false) { - bitSetConsumer.accept(docId); + // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to + // delete + findSplitDocs(RoutingFieldMapper.NAME, ref -> { + int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); + return shardId == targetShardId; + }, leafReader, maybeWrapConsumer.apply(bitSet::set)); + + // TODO have the IndexRouting build the query and pass routingRequired in + boolean routingRequired = indexMetadata.mapping() == null + ? false + : indexMetadata.mapping().routingRequired(); + // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the + // ones + // with a routing value from the next iteration and delete / select based on the ID. 
+ if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { + /* + * This is a special case where some docs don't have routing values. + * It's annoying, but it's allowed to build an index where some documents + * hve routing and others don't. + * + * Luckily, if the routing field is required in the mapping then we can + * safely assume that all documents which are don't have a routing are + * nested documents. And we pick those up later based on the assignment + * of the document that contains them. + */ + FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); + findSplitDocs( + RoutingFieldMapper.NAME, + Predicates.never(), + leafReader, + maybeWrapConsumer.apply(hasRoutingValue::set) + ); + IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { + if (hasRoutingValue.get(docId) == false) { + bitSetConsumer.accept(docId); + } + }); } - }); + } + if (parentBitSet != null) { + // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. + markChildDocs(parentBitSet, bitSet); + } } + + return new ConstantScoreScorer(score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); } - if (parentBitSet != null) { - // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. 
- markChildDocs(parentBitSet, bitSet); - } - } - return new ConstantScoreScorer(this, score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + @Override + public long cost() { + return leafReader.maxDoc(); + } + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index 3dc5953e3d3d8..bc94db13074db 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -33,6 +33,7 @@ import java.nio.file.Path; import java.util.HashSet; import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { @@ -67,12 +68,12 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index // Use Lucene defaults final FSDirectory primaryDirectory = FSDirectory.open(location, lockFactory); if (primaryDirectory instanceof MMapDirectory mMapDirectory) { - return new HybridDirectory(lockFactory, setPreload(mMapDirectory, lockFactory, preLoadExtensions)); + return new HybridDirectory(lockFactory, setPreload(mMapDirectory, preLoadExtensions)); } else { return primaryDirectory; } case MMAPFS: - return setPreload(new MMapDirectory(location, lockFactory), lockFactory, preLoadExtensions); + return setPreload(new MMapDirectory(location, lockFactory), preLoadExtensions); case SIMPLEFS: case NIOFS: return new NIOFSDirectory(location, lockFactory); @@ -81,17 +82,23 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index } } - public static MMapDirectory setPreload(MMapDirectory mMapDirectory, LockFactory lockFactory, Set preLoadExtensions) - throws IOException { - assert mMapDirectory.getPreload() == false; + /** Sets the preload, if any, on the given directory based on the extensions. Returns the same directory instance. 
*/ + // visibility and extensibility for testing + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + mMapDirectory.setPreload(getPreloadFunc(preLoadExtensions)); + return mMapDirectory; + } + + /** Gets a preload function based on the given preLoadExtensions. */ + static BiPredicate getPreloadFunc(Set preLoadExtensions) { if (preLoadExtensions.isEmpty() == false) { if (preLoadExtensions.contains("*")) { - mMapDirectory.setPreload(true); + return MMapDirectory.ALL_FILES; } else { - return new PreLoadMMapDirectory(mMapDirectory, lockFactory, preLoadExtensions); + return (name, context) -> preLoadExtensions.contains(FileSwitchDirectory.getExtension(name)); } } - return mMapDirectory; + return MMapDirectory.NO_FILES; } /** @@ -116,6 +123,8 @@ public IndexInput openInput(String name, IOContext context) throws IOException { // we need to do these checks on the outer directory since the inner doesn't know about pending deletes ensureOpen(); ensureCanRead(name); + // we switch the context here since mmap checks for the READONCE context by identity + context = context == Store.READONCE_CHECKSUM ? IOContext.READONCE : context; // we only use the mmap to open inputs. Everything else is managed by the NIOFSDirectory otherwise // we might run into trouble with files that are pendingDelete in one directory but still // listed in listAll() from the other. We on the other hand don't want to list files from both dirs @@ -162,50 +171,4 @@ MMapDirectory getDelegate() { return delegate; } } - - // TODO it would be nice to share code between PreLoadMMapDirectory and HybridDirectory but due to the nesting aspect of - // directories here makes it tricky. It would be nice to allow MMAPDirectory to pre-load on a per IndexInput basis. 
- static final class PreLoadMMapDirectory extends MMapDirectory { - private final MMapDirectory delegate; - private final Set preloadExtensions; - - PreLoadMMapDirectory(MMapDirectory delegate, LockFactory lockFactory, Set preload) throws IOException { - super(delegate.getDirectory(), lockFactory); - super.setPreload(false); - this.delegate = delegate; - this.delegate.setPreload(true); - this.preloadExtensions = preload; - assert getPreload() == false; - } - - @Override - public void setPreload(boolean preload) { - throw new IllegalArgumentException("can't set preload on a preload-wrapper"); - } - - @Override - public IndexInput openInput(String name, IOContext context) throws IOException { - if (useDelegate(name)) { - // we need to do these checks on the outer directory since the inner doesn't know about pending deletes - ensureOpen(); - ensureCanRead(name); - return delegate.openInput(name, context); - } - return super.openInput(name, context); - } - - @Override - public synchronized void close() throws IOException { - IOUtils.close(super::close, delegate); - } - - boolean useDelegate(String name) { - final String extension = FileSwitchDirectory.getExtension(name); - return preloadExtensions.contains(extension); - } - - MMapDirectory getDelegate() { - return delegate; - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index a1038356735f0..c3d21b23d6a49 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -33,6 +33,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; @@ -147,7 +148,15 @@ public class Store extends 
AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. */ - public static final IOContext READONCE_CHECKSUM = new IOContext(IOContext.READONCE, true); + public static final IOContext READONCE_CHECKSUM = createReadOnceContext(); + + // while equivalent, these different read once contexts are checked by identity in directory implementations + private static IOContext createReadOnceContext() { + var context = IOContext.READONCE.withReadAdvice(ReadAdvice.SEQUENTIAL); + assert context != IOContext.READONCE; + assert context.equals(IOContext.READONCE); + return context; + } private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; @@ -632,7 +641,7 @@ private static void failIfCorrupted(Directory directory) throws IOException { List ex = new ArrayList<>(); for (String file : files) { if (file.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { - try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(file)) { CodecUtil.checkHeader(input, CODEC, CORRUPTED_MARKER_CODEC_VERSION, CORRUPTED_MARKER_CODEC_VERSION); final int size = input.readVInt(); final byte[] buffer = new byte[size]; @@ -919,7 +928,10 @@ private static void checksumFromLuceneFile( boolean readFileAsHash, BytesRef writerUuid ) throws IOException { - try (IndexInput in = directory.openInput(file, READONCE_CHECKSUM)) { + // We select the read once context carefully here since these constants, while equivalent are + // checked by identity in the different directory implementations. + var context = file.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : READONCE_CHECKSUM; + try (IndexInput in = directory.openInput(file, context)) { final long length = in.length(); if (length < CodecUtil.footerLength()) { // If the file isn't long enough to contain the footer then verifying it triggers an IAE, but really it's corrupted diff --git a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java index 2be9d0f224e24..501c2496aacb6 100644 --- a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java +++ b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java @@ -195,7 +195,7 @@ public BytesRef hash() { * * This ID may be {@link StoreFileMetadata#UNAVAILABLE_WRITER_UUID} (i.e. zero-length) if unavailable, e.g.: * - * - The file was written by a version of Lucene prior to {@link org.apache.lucene.util.Version#LUCENE_8_6_0}. + * - The file was written by a version of Lucene prior to 8.6.0. * - The metadata came from a version of Elasticsearch prior to {@link StoreFileMetadata#WRITER_UUID_MIN_VERSION}). * - The file is not one of the files listed above. 
* diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 763abb41797b5..db84be817bbd7 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -96,7 +96,7 @@ static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequ /* or from an existing document */ else if (docIdAndVersion != null) { // fields with stored term vectors - termVectorsByField = docIdAndVersion.reader.getTermVectors(docIdAndVersion.docId); + termVectorsByField = docIdAndVersion.reader.termVectors().get(docIdAndVersion.docId); Set selectedFields = request.selectedFields(); // generate tvs for fields where analyzer is overridden if (selectedFields == null && request.perFieldAnalyzer() != null) { @@ -301,7 +301,7 @@ private static Fields generateTermVectors( } } /* and read vectors from it */ - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java index 29161814e7724..525da1670f900 100644 --- a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java @@ -94,7 +94,7 @@ static Automaton buildAutomaton(String pattern) { String output = pattern; output = output.replace(".", "\\."); output = output.replace("*", ".*"); - return new RegExp(output).toAutomaton(); + return new RegExp(output, RegExp.ALL | RegExp.ALL).toAutomaton(); } /** diff --git 
a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index abba6ec6ae684..9bca59e9e4d62 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -12,13 +12,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.ShardCoreKeyMap; @@ -173,24 +171,12 @@ public int count(LeafReaderContext context) throws IOException { return in.count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); return in.scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.bulkScorer(context); - } - @Override public boolean isCacheable(LeafReaderContext ctx) { return in.isCacheable(ctx); diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 08148de0591cb..f3456870114f5 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ 
b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -41,6 +41,8 @@ import java.util.Objects; import java.util.Set; +import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; + /** * Uses a pattern string to define a protected space for indices belonging to a system feature, and, if needed, provides metadata for * managing indices that match the pattern. @@ -360,7 +362,7 @@ protected SystemIndexDescriptor( this.primaryIndex = primaryIndex; this.aliasName = aliasName; - final Automaton automaton = buildAutomaton(indexPattern, aliasName); + final Automaton automaton = Operations.determinize(buildAutomaton(indexPattern, aliasName), DEFAULT_DETERMINIZE_WORK_LIMIT); this.indexPatternAutomaton = new CharacterRunAutomaton(automaton); if (primaryIndex != null && indexPatternAutomaton.run(primaryIndex) == false) { throw new IllegalArgumentException("primary index does not match the index pattern!"); @@ -883,15 +885,15 @@ static Automaton buildAutomaton(String pattern, String alias) { final String patternAsRegex = patternToRegex(pattern); final String aliasAsRegex = alias == null ? 
null : patternToRegex(alias); - final Automaton patternAutomaton = new RegExp(patternAsRegex).toAutomaton(); + final Automaton patternAutomaton = new RegExp(patternAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); if (aliasAsRegex == null) { return patternAutomaton; } - final Automaton aliasAutomaton = new RegExp(aliasAsRegex).toAutomaton(); + final Automaton aliasAutomaton = new RegExp(aliasAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); - return Operations.union(patternAutomaton, aliasAutomaton); + return Operations.determinize(Operations.union(patternAutomaton, aliasAutomaton), DEFAULT_DETERMINIZE_WORK_LIMIT); } /** diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index a0a4388a4d54a..856b30d1c19e8 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -14,7 +14,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateResponse.ResetFeatureStateStatus; @@ -178,7 +177,7 @@ public SystemIndices(List pluginAndModuleFeatures) { this.netNewSystemIndexAutomaton = buildNetNewIndexCharacterRunAutomaton(featureDescriptors); this.productToSystemIndicesMatcher = getProductToSystemIndicesMap(featureDescriptors); this.executorSelector = new ExecutorSelector(this); - this.systemNameAutomaton = MinimizationOperations.minimize( + this.systemNameAutomaton = Operations.determinize( Operations.union(List.of(systemIndexAutomata, systemDataStreamIndicesAutomata, 
buildDataStreamAutomaton(featureDescriptors))), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); @@ -264,9 +263,7 @@ private static Map getProductToSystemIndicesMap(M .collect( Collectors.toUnmodifiableMap( Entry::getKey, - entry -> new CharacterRunAutomaton( - MinimizationOperations.minimize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ) + entry -> new CharacterRunAutomaton(Operations.determinize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)) ) ); } @@ -426,7 +423,7 @@ private static Automaton buildIndexAutomaton(Map featureDescrip .stream() .map(SystemIndices::featureToIndexAutomaton) .reduce(Operations::union); - return MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map featureDescriptors) { @@ -437,9 +434,7 @@ private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map SystemIndexDescriptor.buildAutomaton(descriptor.getIndexPattern(), descriptor.getAliasName())) .reduce(Operations::union); - return new CharacterRunAutomaton( - MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ); + return new CharacterRunAutomaton(Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton featureToIndexAutomaton(Feature feature) { @@ -459,7 +454,7 @@ private static Automaton buildDataStreamAutomaton(Map featureDe .map(dsName -> SystemIndexDescriptor.buildAutomaton(dsName, null)) .reduce(Operations::union); - return automaton.isPresent() ? MinimizationOperations.minimize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; + return automaton.isPresent() ? 
Operations.determinize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; } private static Predicate buildDataStreamNamePredicate(Map featureDescriptors) { @@ -472,7 +467,7 @@ private static Automaton buildDataStreamBackingIndicesAutomaton(Map 1) { throw new IllegalStateException( "failed to extract doc:" + target + ", the grouping field must be single valued" ); } + ord = (int) sorted.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java index ed07525c1dd7b..443963dd59dcd 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java @@ -170,10 +170,10 @@ public static TopFieldGroups merge(Sort sort, int start, int size, TopFieldGroup final TopFieldGroups shard = shardHits[shardIDX]; // totalHits can be non-zero even if no hits were // collected, when searchAfter was used: - totalHitCount += shard.totalHits.value; + totalHitCount += shard.totalHits.value(); // If any hit count is a lower bound then the merged // total hit count is a lower bound as well - if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + if (shard.totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } if (CollectionUtils.isEmpty(shard.scoreDocs) == false) { diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java index dca4ff503c788..67ece200c06ee 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java @@ -18,7 +18,7 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -61,7 +61,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final BinaryDocValues values = context.reader().getBinaryDocValues(fieldName); if (values == null) { return null; @@ -106,7 +106,8 @@ public float matchCost() { return 4; // at most 4 comparisons } }; - return new ConstantScoreScorer(this, score(), scoreMode, iterator); + + return new DefaultScorerSupplier(new ConstantScoreScorer(score(), scoreMode, iterator)); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java index c75c6e2373f25..788bf76087d1f 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.IOSupplier; import org.apache.lucene.util.InPlaceMergeSorter; import java.io.IOException; @@ -188,7 +189,11 @@ private static TermStates adjustTTF(IndexReaderContext readerContext, TermStates int df = termContext.docFreq(); long ttf = sumTTF; for (int i = 0; i < len; i++) { - TermState termState = termContext.get(leaves.get(i)); + IOSupplier termStateSupplier = termContext.get(leaves.get(i)); + if 
(termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } @@ -212,7 +217,11 @@ private static TermStates adjustDF(IndexReaderContext readerContext, TermStates } TermStates newCtx = new TermStates(readerContext); for (int i = 0; i < len; ++i) { - TermState termState = ctx.get(leaves.get(i)); + IOSupplier termStateSupplier = ctx.get(leaves.get(i)); + if (termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java index 8e85c1d974382..13b0bf650a39e 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,17 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo throw new IllegalStateException("Executing against a different reader than the query has been rewritten against"); } return new ConstantScoreWeight(this, boost) { + @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final int maxDoc = context.reader().maxDoc(); if (context.docBase + maxDoc <= minDoc) { return null; } final int segmentMinDoc = Math.max(0, minDoc - context.docBase); final DocIdSetIterator disi = new MinDocIterator(segmentMinDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index 1a8ac203f0cb5..6575f7f416bd9 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -67,8 +68,8 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Sort segmentSort = context.reader().getMetaData().getSort(); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Sort segmentSort = context.reader().getMetaData().sort(); if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) { throw new IOException("search sort :[" + sort + "] does not match the index sort:[" + segmentSort + "]"); } @@ -80,7 +81,8 @@ public Scorer scorer(LeafReaderContext context) throws IOException { return null; } final DocIdSetIterator disi = new MinDocQuery.MinDocIterator(firstDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java 
b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java index bd64ee88fc300..064f8ef3eacd8 100644 --- a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java @@ -109,14 +109,8 @@ private ConstantScoreWeight getStandardWeight(ScoreMode scoreMode, float boost) final Component2D component2D = create(geometries); return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -125,7 +119,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final GeometryDocValueReader reader = new GeometryDocValueReader(); final Component2DVisitor visitor = Component2DVisitor.getVisitor(component2D, relation, encoder); @@ -143,7 +137,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? 
} }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override @@ -167,14 +161,8 @@ private ConstantScoreWeight getContainsWeight(ScoreMode scoreMode, float boost) } return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -183,7 +171,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final Component2DVisitor[] visitors = new Component2DVisitor[components2D.size()]; for (int i = 0; i < components2D.size(); i++) { @@ -210,7 +198,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? 
} }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 9b33ac2ea12fc..8e66486329577 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -415,8 +415,8 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr Constants.OS_ARCH, Constants.JVM_VENDOR, Constants.JVM_NAME, - Constants.JAVA_VERSION, - Constants.JVM_VERSION + System.getProperty("java.version"), + Runtime.version().toString() ); logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments())); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 5df7a8ea20f54..435bf71e3b2c9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -4029,7 +4029,7 @@ protected void snapshotFile(SnapshotShardContext context, FileInfo fileInfo) thr final String file = fileInfo.physicalName(); try ( Releasable ignored = context.withCommitRef(); - IndexInput indexInput = store.openVerifyingInput(file, IOContext.READ, fileInfo.metadata()) + IndexInput indexInput = store.openVerifyingInput(file, IOContext.DEFAULT, fileInfo.metadata()) ) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index a457ebb67fd47..09f31abb58eb3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -72,7 +72,7 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli return channel -> client.search(countRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(SearchResponse countResponse) throws Exception { - assert countResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert countResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; return RestTable.buildResponse(buildTable(request, countResponse), channel); } }); @@ -90,7 +90,7 @@ protected Table getTableWithHeader(final RestRequest request) { private Table buildTable(RestRequest request, SearchResponse response) { Table table = getTableWithHeader(request); table.startRow(); - table.addCell(response.getHits().getTotalHits().value); + table.addCell(response.getHits().getTotalHits().value()); table.endRow(); return table; diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index 23da666a39a7e..c1a55874bfc58 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -87,7 +87,7 @@ public RestResponse buildResponse(SearchResponse response, XContentBuilder build if (terminateAfter != DEFAULT_TERMINATE_AFTER) { builder.field("terminated_early", response.isTerminatedEarly()); } - builder.field("count", response.getHits().getTotalHits().value); + builder.field("count", response.getHits().getTotalHits().value()); buildBroadcastShardsHeader( builder, request, diff --git 
a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index c8129717b5ccd..6c7d36ee9a436 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -116,6 +116,11 @@ public void setDocument(int docid) { this.docId = docid; } + /** Get the current document. */ + public int docId() { + return docId; + } + public void setScorer(Scorable scorer) { this.scoreSupplier = () -> { try { diff --git a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java index c80a2e7200ecc..d83530e82b16d 100644 --- a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java +++ b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java @@ -46,9 +46,8 @@ public void setDocument(int docID) { public void execute() { try { if (hasValue) { - long ord; - while ((ord = sortedSetDocValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - BytesRef bytesRef = sortedSetDocValues.lookupOrd(ord); + for (int i = 0; i < sortedSetDocValues.docValueCount(); i++) { + BytesRef bytesRef = sortedSetDocValues.lookupOrd(sortedSetDocValues.nextOrd()); emit(bytesRef.utf8ToString()); } } diff --git a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java index 6297fbaa23187..d9550dd17a058 100644 --- a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java @@ -157,7 +157,8 @@ public SortedSetIpSupplier(SortedSetDocValues in) { public void setNextDocId(int docId) throws IOException { count = 0; if (in.advanceExact(docId)) { - for (long ord = in.nextOrd(); ord != 
SortedSetDocValues.NO_MORE_ORDS; ord = in.nextOrd()) { + for (int i = 0; i < in.docValueCount(); i++) { + long ord = in.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java index fd7c5227e22ac..be1b972dcd41a 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues; @@ -19,7 +20,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class ByteKnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected ByteVectorValues input; // null if no vectors + protected final ByteVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; // null if no vectors protected byte[] vector; protected final int dims; @@ -31,6 +33,7 @@ protected ByteKnnDenseVectorDocValuesField(@Nullable ByteVectorValues input, Str super(name, elementType); this.dims = dims; this.input = input; + this.iterator = input == null ? 
null : input.iterator(); } @Override @@ -38,15 +41,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java index c7678b03dd8c5..3e38092200511 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenormalizedCosineFloatVectorValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; @@ -20,7 +21,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected FloatVectorValues input; // null if no vectors + protected final FloatVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; protected float[] vector; protected final int dims; @@ -28,6 +30,7 @@ public KnnDenseVectorDocValuesField(@Nullable FloatVectorValues input, String na super(name, ElementType.FLOAT); this.dims = dims; this.input = 
input; + this.iterator = input == null ? null : input.iterator(); } @Override @@ -35,15 +38,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index 49480816bbbb1..5ac25fe0ff695 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -477,11 +477,11 @@ protected BytesRef pick( @Override protected int pick(SortedSetDocValues values) throws IOException { - long maxOrd = -1; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { - maxOrd = ord; + int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextOrd(); } - return Math.toIntExact(maxOrd); + return Math.toIntExact(values.nextOrd()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 6a89d66bb3411..beac39c2de304 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -16,8 +16,11 @@ import java.util.Set; public final class SearchFeatures implements FeatureSpecification { + + public static final NodeFeature LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade"); + @Override 
public Set getFeatures() { - return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED); + return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 8ff5de3c9b8ac..896dd7f999949 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -288,12 +288,12 @@ public Iterator toXContentChunked(ToXContent.Params params return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> { boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value; + long total = totalHits == null ? -1 : totalHits.value(); b.field(Fields.TOTAL, total); } else if (totalHits != null) { b.startObject(Fields.TOTAL); - b.field("value", totalHits.value); - b.field("relation", totalHits.relation == Relation.EQUAL_TO ? "eq" : "gte"); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? 
"eq" : "gte"); b.endObject(); } return b; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index f66f6b4a3805d..624db3f1cfe8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.ScoreMode; import java.io.IOException; @@ -201,6 +200,7 @@ private static class MultiLeafBucketCollector extends LeafBucketCollector { private final boolean cacheScores; private final LeafBucketCollector[] collectors; private int numCollectors; + private ScoreCachingScorable scorable; private MultiLeafBucketCollector(List collectors, boolean cacheScores) { this.collectors = collectors.toArray(new LeafBucketCollector[collectors.size()]); @@ -211,11 +211,11 @@ private MultiLeafBucketCollector(List collectors, boolean c @Override public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); + scorable = new ScoreCachingScorable(scorer); } for (int i = 0; i < numCollectors; ++i) { final LeafCollector c = collectors[i]; - c.setScorer(scorer); + c.setScorer(cacheScores ? 
scorable : scorer); } } @@ -227,6 +227,9 @@ private void removeCollector(int i) { @Override public void collect(int doc, long bucket) throws IOException { + if (scorable != null) { + scorable.curDoc = doc; + } final LeafBucketCollector[] collectors = this.collectors; int numCollectors = this.numCollectors; for (int i = 0; i < numCollectors;) { @@ -244,4 +247,25 @@ public void collect(int doc, long bucket) throws IOException { } } } + + private static class ScoreCachingScorable extends Scorable { + + private final Scorable in; + private int curDoc = -1; // current document + private int scoreDoc = -1; // document that score was computed on + private float score; + + ScoreCachingScorable(Scorable in) { + this.in = in; + } + + @Override + public float score() throws IOException { + if (curDoc != scoreDoc) { + score = in.score(); + scoreDoc = curDoc; + } + return score; + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 2e9e04eca4afc..9ee15306ce636 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -278,7 +278,7 @@ private static boolean isMaybeMultivalued(LeafReaderContext context, SortField s * optimization and null if index sort is not applicable. 
*/ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException { - Sort indexSort = context.reader().getMetaData().getSort(); + Sort indexSort = context.reader().getMetaData().sort(); if (indexSort == null) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index 0d0d2c6f922e8..dcc2ad52cbc50 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -41,8 +41,6 @@ import java.util.List; import java.util.function.BiConsumer; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - /** * A {@link SingleDimensionValuesSource} for global ordinals. */ @@ -247,9 +245,8 @@ public DocIdSetIterator competitiveIterator() { @Override public void collect(int doc, long bucket) throws IOException { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { - currentValue = ord; + for (int i = 0; i < dvs.docValueCount(); i++) { + currentValue = dvs.nextOrd(); next.collect(doc, bucket); } } else if (missingBucket) { @@ -306,8 +303,8 @@ public void collect(int doc, long bucket) throws IOException { public void collect(int doc, long bucket) throws IOException { if (currentValueIsSet == false) { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + for (int i = 0; i < dvs.docValueCount(); i++) { + long ord = dvs.nextOrd(); if (term.equals(lookup.lookupOrd(ord))) { currentValueIsSet = true; currentValue = ord; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index f774f67b3df8f..af4d60bf424a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -39,7 +39,6 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; class CountedTermsAggregator extends TermsAggregator { @@ -77,7 +76,8 @@ private LeafBucketCollector getLeafCollector(SortedSetDocValues ords, LeafBucket @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (ords.advanceExact(doc)) { - for (long ord = ords.nextOrd(); ord != NO_MORE_ORDS; ord = ords.nextOrd()) { + for (int i = 0; i < ords.docValueCount(); i++) { + long ord = ords.nextOrd(); collectOrdinal(bucketOrds.add(owningBucketOrd, ords.lookupOrd(ord)), doc, sub); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java index 2969b7bf82c80..7dd192b317a57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -21,7 +20,6 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -117,15 +115,6 @@ public int count(LeafReaderContext context) throws IOException { return multiValuedSegmentWeight().count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /* @@ -144,19 +133,6 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return multiValuedSegmentWeight().scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - PointValues points = context.reader().getPointValues(field); - if (points == null) { - return null; - } - if (points.size() == points.getDocCount()) { - // Each doc that has points has exactly one point. 
- return singleValuedSegmentWeight().bulkScorer(context); - } - return multiValuedSegmentWeight().bulkScorer(context); - } - private Weight singleValuedSegmentWeight() throws IOException { if (singleValuedSegmentWeight == null) { singleValuedSegmentWeight = delegateForSingleValuedSegments.createWeight(searcher, scoreMode, boost); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java index 282c09c84414c..e8e33655d47c1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; @@ -215,7 +216,7 @@ long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live) // No hits in this segment. return 0; } - scorer.score(counter, live); + scorer.score(counter, live, 0, DocIdSetIterator.NO_MORE_DOCS); return counter.readAndReset(ctx); } @@ -228,7 +229,7 @@ void collect(LeafReaderContext ctx, LeafCollector collector, Bits live) throws I // No hits in this segment. 
return; } - scorer.score(collector, live); + scorer.score(collector, live, 0, DocIdSetIterator.NO_MORE_DOCS); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 4c87b5961ac1a..b5d3485e72f82 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.bucket.global; import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; @@ -45,6 +46,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, return LeafBucketCollector.NO_OP_COLLECTOR; } grow(1); + scorer.score(new LeafCollector() { @Override public void collect(int doc) throws IOException { @@ -55,7 +57,7 @@ public void collect(int doc) throws IOException { public void setScorer(Scorable scorer) throws IOException { sub.setScorer(scorer); } - }, aggCtx.getLeafReaderContext().reader().getLiveDocs()); + }, aggCtx.getLeafReaderContext().reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 12182a5931a4f..0fbb9745aa400 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -197,7 +197,6 @@ void processBufferedChildBuckets() throws 
IOException { } for (; childDocId < currentParentDoc; childDocId = childDocs.nextDoc()) { - cachedScorer.doc = childDocId; for (var bucket : bucketBuffer) { collectBucket(sub, childDocId, bucket); } @@ -207,19 +206,12 @@ void processBufferedChildBuckets() throws IOException { } private static class CachedScorable extends Scorable { - int doc; float score; @Override public final float score() { return score; } - - @Override - public int docID() { - return doc; - } - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 2f18d2dc1e42e..6119af3cb6a57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -157,7 +157,8 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas this.collector = (doc, bucket) -> { if (values.advanceExact(doc)) { int lo = 0; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); lo = collect(doc, ord, bucket, lo); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 0cfad5ba9e0c7..37cee75c11b48 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import 
org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.util.BigArrays; @@ -99,7 +99,7 @@ public void collect(int doc, long bucket) throws IOException { // Designed to be overridden by subclasses that may score docs by criteria // other than Lucene score protected TopDocsCollector createTopDocsCollector(int size) throws IOException { - return TopScoreDocCollector.create(size, Integer.MAX_VALUE); + return new TopScoreDocCollectorManager(size, null, Integer.MAX_VALUE, false).newCollector(); } // Can be overridden by subclasses that have a different priority queue implementation @@ -214,7 +214,6 @@ class PerSegmentCollects extends Scorable { private final AggregationExecutionContext aggCtx; int maxDocId = Integer.MIN_VALUE; private float currentScore; - private int currentDocId = -1; private Scorable currentScorer; PerSegmentCollects(AggregationExecutionContext aggCtx) throws IOException { @@ -249,7 +248,6 @@ public void replayRelatedMatches(List sd) throws IOException { leafCollector.setScorer(this); currentScore = 0; - currentDocId = -1; if (maxDocId < 0) { return; } @@ -259,7 +257,6 @@ public void replayRelatedMatches(List sd) throws IOException { int rebased = scoreDoc.doc - aggCtx.getLeafReaderContext().docBase; if ((rebased >= 0) && (rebased <= maxDocId)) { currentScore = scoreDoc.score; - currentDocId = rebased; // We stored the bucket ID in Lucene's shardIndex property // for convenience. 
leafCollector.collect(rebased, scoreDoc.shardIndex); @@ -276,11 +273,6 @@ public float score() throws IOException { return currentScore; } - @Override - public int docID() { - return currentDocId; - } - public void collect(int docId, long parentBucket) throws IOException { perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1); PerParentBucketSamples sampler = perBucketSamples.get((int) parentBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index ef4101892a461..539b9440cea25 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -118,12 +118,12 @@ public long longValue() throws IOException { @Override public boolean advanceExact(int target) throws IOException { if (globalOrds.advanceExact(target)) { - value = globalOrds.nextOrd(); // Check there isn't a second value for this // document - if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (globalOrds.docValueCount() > 1) { throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } + value = globalOrds.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java index ed39f41d9daed..89fe1a53a01cc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java @@ -20,6 +20,7 @@ 
import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,15 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SplittableRandom random = new SplittableRandom(BitMixer.mix(hash ^ seed)); int maxDoc = context.reader().maxDoc(); - return new ConstantScoreScorer( - this, + Scorer scorer = new ConstantScoreScorer( boost, ScoreMode.COMPLETE_NO_SCORES, new RandomSamplingIterator(maxDoc, p, random::nextInt) ); + return new DefaultScorerSupplier(scorer); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 1b0ec8e356082..0f7c61dc9f25b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -53,7 +53,6 @@ import java.util.function.LongPredicate; import java.util.function.LongUnaryOperator; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; /** @@ -167,7 +166,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); 
collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } @@ -179,7 +179,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (false == acceptedGlobalOrdinals.test(globalOrd)) { continue; } @@ -350,7 +351,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == segmentOrds.advanceExact(doc)) { return; } - for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_ORDS; segmentOrd = segmentOrds.nextOrd()) { + for (int i = 0; i < segmentOrds.docValueCount(); i++) { + long segmentOrd = segmentOrds.nextOrd(); int docCount = docCountProvider.getDocCount(doc); segmentDocCounts.increment(segmentOrd + 1, docCount); } @@ -524,7 +526,8 @@ private void forEachExcludeDeletedDocs(BucketInfoConsumer consumer) throws IOExc if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } @@ -634,7 +637,8 @@ void forEachExcludeDeletedDocs(long owningBucketOrd, BucketInfoConsumer consumer if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? 
valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 4d78df2704740..4bcbe08ed227c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -358,8 +358,8 @@ public IncludeExclude( if (exclude != null && excludeValues != null) { throw new IllegalArgumentException(); } - this.include = include == null ? null : new RegExp(include); - this.exclude = exclude == null ? null : new RegExp(exclude); + this.include = include == null ? null : new RegExp(include, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); + this.exclude = exclude == null ? 
null : new RegExp(exclude, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); this.includeValues = includeValues; this.excludeValues = excludeValues; this.incZeroBasedPartition = 0; @@ -529,7 +529,7 @@ private Automaton toAutomaton() { if (exclude != null) { a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - return a; + return Operations.determinize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public StringFilter convertToStringFilter(DocValueFormat format) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java index 84cd869517702..05aa80f06448d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java @@ -308,7 +308,8 @@ public void collect(int doc, long bucketOrd) throws IOException { bits = new BitArray(maxOrd, bigArrays); visitedOrds.set(bucketOrd, bits); } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java index 32be4513f5c3e..d0685b3a09262 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java @@ -259,8 +259,8 @@ public CompetitiveIterator competitiveIterator() { @Override public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { - for (long 
ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues - .nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); if (bits.getAndSet(ord) == false) { competitiveIterator.onVisitedOrdinal(ord); } @@ -309,7 +309,8 @@ public void collect(int doc, long bucketOrd) throws IOException { public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { final BitArray bits = getNewOrExistingBitArray(bucketOrd); - for (long ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index f7f319618fa36..8ff381cbbc84d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -135,7 +135,7 @@ public InternalAggregation get() { maxScore = reduceAndFindMaxScore(aggregations, shardDocs); reducedTopDocs = TopDocs.merge(from, size, shardDocs); } - assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; + assert reducedTopDocs.totalHits.relation() == Relation.EQUAL_TO; return new InternalTopHits( getName(), @@ -262,8 +262,8 @@ public boolean equals(Object obj) { InternalTopHits other = (InternalTopHits) obj; if (from != other.from) return false; if (size != other.size) return false; - if (topDocs.topDocs.totalHits.value != other.topDocs.topDocs.totalHits.value) return false; - if (topDocs.topDocs.totalHits.relation != other.topDocs.topDocs.totalHits.relation) return false; + if (topDocs.topDocs.totalHits.value() != other.topDocs.topDocs.totalHits.value()) return false; + if 
(topDocs.topDocs.totalHits.relation() != other.topDocs.topDocs.totalHits.relation()) return false; if (topDocs.topDocs.scoreDocs.length != other.topDocs.topDocs.scoreDocs.length) return false; for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc thisDoc = topDocs.topDocs.scoreDocs[d]; @@ -287,8 +287,8 @@ public int hashCode() { int hashCode = super.hashCode(); hashCode = 31 * hashCode + Integer.hashCode(from); hashCode = 31 * hashCode + Integer.hashCode(size); - hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value); - hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation.hashCode(); + hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value()); + hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation().hashCode(); for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc doc = topDocs.topDocs.scoreDocs[d]; hashCode = 31 * hashCode + doc.doc; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java index d59d824bde435..90d6c298fbd23 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java @@ -51,7 +51,7 @@ public static boolean hasValue(InternalTDigestPercentiles agg) { } public static boolean hasValue(InternalTopHits agg) { - return (agg.getHits().getTotalHits().value == 0 + return (agg.getHits().getTotalHits().value() == 0 && Double.isNaN(agg.getHits().getMaxScore()) && Double.isNaN(agg.getTopDocs().maxScore)) == false; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 9d8d98bc7c7cc..87d8f839dfca1 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -19,8 +19,10 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.lucene.Lucene; @@ -136,12 +138,14 @@ public void collect(int docId, long bucket) throws IOException { // but here we create collectors ourselves and we need prevent OOM because of crazy an offset and size. topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); if (sort == null) { - collectors = new Collectors(TopScoreDocCollector.create(topN, Integer.MAX_VALUE), null); + TopScoreDocCollector topScoreDocCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false) + .newCollector(); + collectors = new Collectors(topScoreDocCollector, null); } else { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits(){ // Note that this would require to catch CollectionTerminatedException collectors = new Collectors( - TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), + new TopFieldCollectorManager(sort.sort, topN, null, Integer.MAX_VALUE, false).newCollector(), subSearchContext.trackScores() ? 
new MaxScoreCollector() : null ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java index 4724bd0db05df..9b47507628dd1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java @@ -271,18 +271,17 @@ public long nextOrd() throws IOException { if (hasOrds) { return values.nextOrd(); } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -321,7 +320,11 @@ public BytesRef lookupOrd(long ord) throws IOException { @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -339,12 +342,7 @@ public long nextOrd() throws IOException { return ord + 1; } } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 742d366efa7a3..472619da78622 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -104,7 +104,7 @@ private void search(BucketCollector bucketCollector, Weight weight) throws IOExc Scorer scorer = weight.scorer(leaf); if (scorer != null) { if (minimumScore != null) { - scorer = new MinScoreScorer(weight, scorer, minimumScore); + scorer = new MinScoreScorer(scorer, minimumScore); } LeafWalker leafWalker = new LeafWalker(leaf, scorer, bucketCollector, () -> tsidOrd[0]); if (leafWalker.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java index 42b29fda3c472..769effdd60240 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java @@ -76,7 +76,7 @@ private static CharacterRunAutomaton nestedChildrenAutomaton(List nested for (String child : nestedChildren) { automata.add(Operations.concatenate(Automata.makeString(child + "."), Automata.makeAnyString())); } - return new CharacterRunAutomaton(Operations.union(automata)); + return new CharacterRunAutomaton(Operations.determinize(Operations.union(automata), AUTOMATON_MAX_DETERMINIZED_STATES)); } // Builds an automaton that will match any field that conforms to one of the input patterns @@ -84,7 +84,11 @@ private static CharacterRunAutomaton buildUnmappedFieldPatternAutomaton(List subInfos = fragInfo.getSubInfos(); CollectionUtil.introSort(subInfos, (o1, o2) -> { - int startOffset = o1.getTermsOffsets().get(0).getStartOffset(); - int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset(); + int startOffset = o1.termsOffsets().get(0).getStartOffset(); + int startOffset2 = o2.termsOffsets().get(0).getStartOffset(); return Integer.compare(startOffset, startOffset2); }); return new 
WeightedFragInfo( - Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), + Math.min(fragInfo.getSubInfos().get(0).termsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), fragInfo.getEndOffset(), subInfos, fragInfo.getTotalBoost() diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index da5d2d093fbd8..78d90377cdc3f 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -56,6 +56,7 @@ import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.Executor; +import java.util.stream.Collectors; /** * Context-aware extension of {@link IndexSearcher}. @@ -76,6 +77,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private QueryProfiler profiler; private final MutableQueryTimeout cancellable; + private final boolean hasExecutor; private final int maximumNumberOfSlices; // don't create slices with less than this number of docs private final int minimumDocsPerSlice; @@ -133,6 +135,7 @@ public ContextIndexSearcher( int minimumDocsPerSlice ) throws IOException { super(wrapWithExitableDirectoryReader ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader, executor); + this.hasExecutor = executor != null; setSimilarity(similarity); setQueryCache(queryCache); setQueryCachingPolicy(queryCachingPolicy); @@ -141,6 +144,15 @@ public ContextIndexSearcher( this.maximumNumberOfSlices = maximumNumberOfSlices; } + /** + * Whether an executor was provided at construction time or not. This indicates whether operations that support concurrency + * may be executed concurrently. 
It is not straightforward to deduct this from {@link #getTaskExecutor()} because {@link IndexSearcher} + * creates a {@link org.apache.lucene.search.TaskExecutor} anyways. + */ + public boolean hasExecutor() { + return hasExecutor; + } + @Override protected LeafSlice[] slices(List leaves) { // we offload to the executor unconditionally, including requests that don't support concurrency @@ -149,11 +161,6 @@ protected LeafSlice[] slices(List leaves) { return leafSlices; } - // package private for testing - int getMinimumDocsPerSlice() { - return minimumDocsPerSlice; - } - public void setProfiler(QueryProfiler profiler) { this.profiler = profiler; } @@ -243,7 +250,14 @@ public static LeafSlice[] computeSlices(List leaves, int maxS throw new IllegalArgumentException("maxSliceNum must be >= 1 (got " + maxSliceNum + ")"); } if (maxSliceNum == 1) { - return new LeafSlice[] { new LeafSlice(new ArrayList<>(leaves)) }; + return new LeafSlice[] { + new LeafSlice( + new ArrayList<>( + leaves.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ) + ) }; } // total number of documents to be searched final int numDocs = leaves.stream().mapToInt(l -> l.reader().maxDoc()).sum(); @@ -291,7 +305,11 @@ private static LeafSlice[] computeSlices(List leaves, int min for (List currentLeaf : queue) { // LeafSlice ctor reorders leaves so that leaves within a slice preserve the order they had within the IndexReader. // This is important given how Elasticsearch sorts leaves by descending @timestamp to get better query performance. 
- slices[upto++] = new LeafSlice(currentLeaf); + slices[upto++] = new LeafSlice( + currentLeaf.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ); } return slices; @@ -344,10 +362,10 @@ private T search(Weight weight, CollectorManager } final List> listTasks = new ArrayList<>(leafSlices.length); for (int i = 0; i < leafSlices.length; ++i) { - final LeafReaderContext[] leaves = leafSlices[i].leaves; + final LeafReaderContextPartition[] leaves = leafSlices[i].partitions; final C collector = collectors.get(i); listTasks.add(() -> { - search(Arrays.asList(leaves), weight, collector); + search(leaves, weight, collector); return collector; }); } @@ -364,7 +382,7 @@ private T search(Weight weight, CollectorManager * 2) handles the ES TimeExceededException */ @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] leaves, Weight weight, Collector collector) throws IOException { boolean success = false; try { super.search(leaves, weight, collector); @@ -412,7 +430,7 @@ public static class TimeExceededException extends RuntimeException { } @Override - protected void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException { + protected void searchLeaf(LeafReaderContext ctx, int minDocId, int maxDocId, Weight weight, Collector collector) throws IOException { cancellable.checkCancelled(); final LeafCollector leafCollector; try { @@ -432,7 +450,7 @@ protected void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collec bulkScorer = new CancellableBulkScorer(bulkScorer, cancellable::checkCancelled); } try { - bulkScorer.score(leafCollector, liveDocs); + bulkScorer.score(leafCollector, liveDocs, minDocId, maxDocId); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git 
a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java index 3bdd7ff3630cf..64b54d3623f04 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java @@ -14,9 +14,9 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; -import org.apache.lucene.index.FilterVectorValues; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.QueryTimeout; @@ -32,6 +32,7 @@ import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import java.io.IOException; +import java.util.Objects; /** * Wraps an {@link IndexReader} with a {@link QueryCancellation} @@ -459,7 +460,6 @@ public void grow(int count) { } private static class ExitableByteVectorValues extends ByteVectorValues { - private int calls; private final QueryCancellation queryCancellation; private final ByteVectorValues in; @@ -479,8 +479,13 @@ public int size() { } @Override - public byte[] vectorValue() throws IOException { - return in.vectorValue(); + public byte[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -505,33 +510,17 @@ public DocIdSetIterator iterator() { } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - final int nextDoc = in.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), 
queryCancellation); } @Override - public int advance(int target) throws IOException { - final int advance = in.advance(target); - checkAndThrowWithSampling(); - return advance; - } - - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + public ByteVectorValues copy() throws IOException { + return in.copy(); } } - private static class ExitableFloatVectorValues extends FilterVectorValues { - private int calls; + private static class ExitableFloatVectorValues extends FilterFloatVectorValues { private final QueryCancellation queryCancellation; ExitableFloatVectorValues(FloatVectorValues vectorValues, QueryCancellation queryCancellation) { @@ -541,17 +530,13 @@ private static class ExitableFloatVectorValues extends FilterVectorValues { } @Override - public int advance(int target) throws IOException { - final int advance = super.advance(target); - checkAndThrowWithSampling(); - return advance; + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); } @Override - public int nextDoc() throws IOException { - final int nextDoc = super.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -575,13 +560,61 @@ public DocIdSetIterator iterator() { }; } - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + @Override + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), queryCancellation); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); } } + private static KnnVectorValues.DocIndexIterator createExitableIterator( + KnnVectorValues.DocIndexIterator delegate, + QueryCancellation queryCancellation + ) { + return new 
KnnVectorValues.DocIndexIterator() { + private int calls; + + @Override + public int index() { + return delegate.index(); + } + + @Override + public int docID() { + return delegate.docID(); + } + + @Override + public long cost() { + return delegate.cost(); + } + + @Override + public int nextDoc() throws IOException { + int nextDoc = delegate.nextDoc(); + checkAndThrowWithSampling(); + return nextDoc; + } + + @Override + public int advance(int target) throws IOException { + final int advance = delegate.advance(target); + checkAndThrowWithSampling(); + return advance; + } + + private void checkAndThrowWithSampling() { + if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { + queryCancellation.checkCancelled(); + } + } + }; + } + private static class ExitableDocSetIterator extends DocIdSetIterator { private int calls; private final DocIdSetIterator in; @@ -622,4 +655,43 @@ private void checkAndThrowWithSampling() { } } } + + /** Delegates all methods to a wrapped {@link FloatVectorValues}. 
*/ + private abstract static class FilterFloatVectorValues extends FloatVectorValues { + + /** Wrapped values */ + protected final FloatVectorValues in; + + /** Sole constructor */ + protected FilterFloatVectorValues(FloatVectorValues in) { + Objects.requireNonNull(in); + this.in = in; + } + + @Override + public DocIndexIterator iterator() { + return in.iterator(); + } + + @Override + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); + } + + @Override + public int dimension() { + return in.dimension(); + } + + @Override + public int size() { + return in.size(); + } + + } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java index 9b594e2935504..f03be3f09b7d2 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java @@ -103,15 +103,6 @@ static final class FieldUsageTrackingLeafReader extends SequentialStoredFieldsLe this.notifier = notifier; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f != null) { - f = new FieldUsageTrackingTermVectorFields(f); - } - return f; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -136,11 +127,6 @@ public PointValues getPointValues(String field) throws IOException { return pointValues; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { StoredFields storedFields = super.storedFields(); diff --git 
a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index cd8f381e85f83..f559325063bef 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.elasticsearch.search.profile.Timer; import java.io.IOException; @@ -25,15 +24,12 @@ final class ProfileScorer extends Scorer { private final Scorer scorer; - private final ProfileWeight profileWeight; private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer, setMinCompetitiveScoreTimer; - ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) { - super(w); + ProfileScorer(Scorer scorer, QueryProfileBreakdown profile) { this.scorer = scorer; - this.profileWeight = w; scoreTimer = profile.getNewTimer(QueryTimingType.SCORE); nextDocTimer = profile.getNewTimer(QueryTimingType.NEXT_DOC); advanceTimer = profile.getNewTimer(QueryTimingType.ADVANCE); @@ -58,11 +54,6 @@ public float score() throws IOException { } } - @Override - public Weight getWeight() { - return profileWeight; - } - @Override public Collection getChildren() throws IOException { return scorer.getChildren(); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index 27bf8ea8aae47..5d35699adec95 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -37,15 +37,6 @@ public ProfileWeight(Query query, Weight 
subQueryWeight, QueryProfileBreakdown p this.profile = profile; } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier supplier = scorerSupplier(context); - if (supplier == null) { - return null; - } - return supplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final Timer timer = profile.getNewTimer(QueryTimingType.BUILD_SCORER); @@ -67,12 +58,24 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti public Scorer get(long loadCost) throws IOException { timer.start(); try { - return new ProfileScorer(weight, subQueryScorerSupplier.get(loadCost), profile); + return new ProfileScorer(subQueryScorerSupplier.get(loadCost), profile); } finally { timer.stop(); } } + @Override + public BulkScorer bulkScorer() throws IOException { + // We use the default bulk scorer instead of the specialized one. The reason + // is that Lucene's BulkScorers do everything at once: finding matches, + // scoring them and calling the collector, so they make it impossible to + // see where time is spent, which is the purpose of query profiling. + // The default bulk scorer will pull a scorer and iterate over matches, + // this might be a significantly different execution path for some queries + // like disjunctions, but in general this is what is done anyway + return super.bulkScorer(); + } + @Override public long cost() { timer.start(); @@ -90,18 +93,6 @@ public void setTopLevelScoringClause() throws IOException { }; } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - // We use the default bulk scorer instead of the specialized one. The reason - // is that Lucene's BulkScorers do everything at once: finding matches, - // scoring them and calling the collector, so they make it impossible to - // see where time is spent, which is the purpose of query profiling. 
- // The default bulk scorer will pull a scorer and iterate over matches, - // this might be a significantly different execution path for some queries - // like disjunctions, but in general this is what is done anyway - return super.bulkScorer(context); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return subQueryWeight.explain(context, doc); diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index d17cd4f69dec7..af65c30b49dcf 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -248,7 +248,7 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort } final Sort sort = sortAndFormats.sort; for (LeafReaderContext ctx : reader.leaves()) { - Sort indexSort = ctx.reader().getMetaData().getSort(); + Sort indexSort = ctx.reader().getMetaData().sort(); if (indexSort == null || Lucene.canEarlyTerminate(sort, indexSort) == false) { return false; } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java index d1cbdd6adb761..00cf90fe12301 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java @@ -202,7 +202,11 @@ public DocIdSetIterator competitiveIterator() throws IOException { } }; } - return new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + LeafCollector leafCollector = new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { + leafCollector = ScoreCachingWrappingScorer.wrap(leafCollector); + } + return leafCollector; } 
private static FilterScorable wrapToIgnoreMinCompetitiveScore(Scorable scorer) { @@ -263,9 +267,6 @@ private class CompositeLeafCollector implements LeafCollector { @Override public void setScorer(Scorable scorer) throws IOException { - if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); - } // Ignore calls to setMinCompetitiveScore so that if the top docs collector // wants to skip low-scoring hits, the aggs collector still sees all hits. // this is important also for terminate_after in case used when total hits tracking is early terminated. diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index b78d9e40ba120..2cb960e7e73cb 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -106,12 +105,12 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /** * We return a scorer even if there are no ranked documents within the segment. * This ensures the correct propagation of the maximum score. 
*/ - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -180,6 +179,7 @@ private int currentDocId() { } }; + return new DefaultScorerSupplier(scorer); } @Override @@ -325,11 +325,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return topWeight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return combinedWeight.scorer(context); - } - @Override public boolean isCacheable(LeafReaderContext ctx) { return combinedWeight.isCacheable(ctx); @@ -340,11 +335,6 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { return combinedWeight.matches(context, doc); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - return combinedWeight.bulkScorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return combinedWeight.scorerSupplier(context); diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java index 5077d68c12baa..c65c2bb6650c1 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractFieldScript; @@ -69,10 +70,11 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext ctx) { + public ScorerSupplier 
scorerSupplier(LeafReaderContext ctx) throws IOException { S scriptContext = scriptContextFunction.apply(ctx); DocIdSetIterator approximation = DocIdSetIterator.all(ctx.reader().maxDoc()); - return new ConstantScoreScorer(this, score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java index 751ecb18cc68f..430d22ebc9084 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.SloppyMath; @@ -79,8 +80,9 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + return new DefaultScorerSupplier(scorer); } @Override @@ -116,8 +118,7 @@ private class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { 
- super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java index 7c8ac4a8cae63..d18098ee7de33 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractLongFieldScript; @@ -56,8 +57,10 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + return new DefaultScorerSupplier( + new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost) + ); } @Override @@ -84,8 +87,7 @@ private class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java 
b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java index ab32427ed4ac1..3c5931367370e 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -34,7 +35,12 @@ public StringScriptFieldRegexpQuery( script, leafFactory, fieldName, - new ByteRunAutomaton(new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates)) + new ByteRunAutomaton( + Operations.determinize( + new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ) + ) ); this.pattern = pattern; this.syntaxFlags = syntaxFlags; diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java index 5bacaf0d36b55..6c1aa6f72c4a1 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -44,7 +45,7 @@ private static Automaton buildAutomaton(Term term, boolean caseInsensitive) { if 
(caseInsensitive) { return AutomatonQueries.toCaseInsensitiveWildcardAutomaton(term); } - return WildcardQuery.toAutomaton(term); + return WildcardQuery.toAutomaton(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java index f4ab7e29e1684..6de888ac8aff4 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -58,9 +59,10 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator iterator = createIterator(context, sliceStart, sliceStart + sliceSize); - return new ConstantScoreScorer(this, boost, scoreMode, iterator); + Scorer scorer = new ConstantScoreScorer(boost, scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } private static DocIdSetIterator createIterator(LeafReaderContext context, int sliceStart, int sliceEnd) { diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java index b66ae219ace97..05cf173468fdc 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.IndexSearcher; 
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -41,7 +42,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), getField()); final DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -66,7 +67,8 @@ public float matchCost() { return 10; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index 3927f54461bb8..9aecbfdd84ee6 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -49,10 +50,11 @@ public TermsSliceQuery(String field, int id, int max) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer 
scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final DocIdSet disi = build(context.reader()); final DocIdSetIterator leafIt = disi.iterator(); - return new ConstantScoreScorer(this, score(), scoreMode, leafIt); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, leafIt); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index a6fd4ef90693d..e60e534d6acaa 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -126,7 +126,6 @@ public void setScorer(Scorable scorer) { @Override protected boolean advanceExact(int doc) throws IOException { - assert doc == scorer.docID() : "expected scorer to be on [" + doc + "] but was on [" + scorer.docID() + "]"; /* We will never be called by documents that don't match the * query and they'll all have a score, thus `true`. 
*/ score = scorer.score(); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 8fdc33f38934f..fd6cfeaea639b 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Weight; @@ -84,7 +85,7 @@ private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSu LeafCollector leafCollector = null; try { leafCollector = collector.getLeafCollector(context); - scorer.score(leafCollector, context.reader().getLiveDocs()); + scorer.score(leafCollector, context.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 1366da366b068..ed8197786ba7e 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -133,7 +133,7 @@ public CandidateSet drawCandidates(CandidateSet set) throws IOException { * because that's what {@link DirectSpellChecker#suggestSimilar} expects * when filtering terms. 
*/ - int threshold = thresholdTermFrequency(original.termStats.docFreq); + int threshold = thresholdTermFrequency(original.termStats.docFreq()); if (threshold == Integer.MAX_VALUE) { // the threshold is the max possible frequency so we can skip the search return set; @@ -226,7 +226,7 @@ public void nextToken() throws IOException { } private static double score(TermStats termStats, double errorScore, long dictionarySize) { - return errorScore * (((double) termStats.totalTermFreq + 1) / ((double) dictionarySize + 1)); + return errorScore * (((double) termStats.totalTermFreq() + 1) / ((double) dictionarySize + 1)); } // package protected for test diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java index a47cd5fe5a84b..0fd3ebcd00865 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java @@ -36,7 +36,7 @@ protected double scoreUnigram(Candidate word) throws IOException { @Override protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { join(separator, spare, w_1.term, word.term); - return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq + alpha * numTerms); + return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq() + alpha * numTerms); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java index fe64a65498776..0d66311303080 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java @@ -58,7 +58,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { 
if (count < 1) { return unigramLambda * scoreUnigram(word); } - return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq)) + unigramLambda * scoreUnigram(word); + return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq())) + unigramLambda * scoreUnigram(word); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index 7257a0d972459..21d1f34b68eee 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -70,7 +70,7 @@ public void nextToken() throws IOException { if (posIncAttr.getPositionIncrement() == 0 && typeAttribute.type() == SynonymFilter.TYPE_SYNONYM) { assert currentSet != null; TermStats termStats = generator.termStats(term); - if (termStats.docFreq > 0) { + if (termStats.docFreq() > 0) { currentSet.addOneCandidate(generator.createCandidate(BytesRef.deepCopyOf(term), termStats, realWordLikelihood)); } } else { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java index d893e0986e0d3..270866c14b20a 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java @@ -35,7 +35,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { if (count < 1) { return discount * scoreUnigram(word); } - return count / (w_1.termStats.totalTermFreq + 0.00000000001d); + return count / (w_1.termStats.totalTermFreq() + 0.00000000001d); } @Override @@ -50,7 +50,7 @@ protected double scoreTrigram(Candidate w, Candidate w_1, Candidate w_2) throws join(separator, spare, w_2.term, 
w_1.term, w.term); long trigramCount = frequency(spare.get()); if (trigramCount < 1) { - return discount * (bigramCount / (w_1.termStats.totalTermFreq + 0.00000000001d)); + return discount * (bigramCount / (w_1.termStats.totalTermFreq() + 0.00000000001d)); } return trigramCount / (bigramCount + 0.00000000001d); } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java index 44bbd0f50951c..31e19b6784757 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.search.Weight; @@ -70,12 +71,12 @@ public Explanation explain(LeafReaderContext leafReaderContext, int i) throws IO } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - VectorScorer vectorScorer = vectorScorer(leafReaderContext); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + VectorScorer vectorScorer = vectorScorer(context); if (vectorScorer == null) { return null; } - return new DenseVectorScorer(this, vectorScorer); + return new DefaultScorerSupplier(new DenseVectorScorer(vectorScorer, boost)); } @Override @@ -178,11 +179,10 @@ static class DenseVectorScorer extends Scorer { private final DocIdSetIterator iterator; private final float boost; - DenseVectorScorer(DenseVectorWeight weight, VectorScorer vectorScorer) { - super(weight); + DenseVectorScorer(VectorScorer vectorScorer, float boost) { this.vectorScorer = vectorScorer; this.iterator = vectorScorer.iterator(); - this.boost = weight.boost; + this.boost = boost; } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java index 413840f2b451b..9f3d83b4da082 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java @@ -34,7 +34,7 @@ public ESDiversifyingChildrenByteKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java index 80704a3b552fe..3907bdf89bc6f 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java @@ -34,7 +34,7 @@ public ESDiversifyingChildrenFloatKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 14bb94a366e50..9363f67a7350b 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -27,7 +27,7 @@ public ESKnnByteVectorQuery(String field, byte[] target, Integer k, int numCands protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index 590d8cfbbaba1..be0437af9131d 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -27,7 +27,7 @@ public ESKnnFloatVectorQuery(String field, float[] target, Integer k, int numCan protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java index 06fb109d6580e..bb83b8528c6c8 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -88,13 +89,13 @@ public Explanation explain(LeafReaderContext context, int doc) { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { // Segment starts indicate how many docs are in the segment, // upper equalling lower indicates no documents for this segment if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -177,6 +178,7 @@ private int currentDocId() { } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java index 77f60adc4fcd8..5219778047bcd 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; 
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; @@ -142,12 +143,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer innerScorer = in.scorer(context); - if (innerScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier inScorerSupplier = in.scorerSupplier(context); + if (inScorerSupplier == null) { return null; } - return new MinScoreScorer(this, innerScorer, docScore, boost); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(inScorerSupplier.get(leadCost), docScore, boost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index b14116b3d55ba..c760e8043e262 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -230,7 +230,7 @@ public void getSynonymSetRules(String synonymSetId, ActionListener { - long totalSynonymRules = countResponse.getHits().getTotalHits().value; + long totalSynonymRules = countResponse.getHits().getTotalHits().value(); if (totalSynonymRules > maxSynonymsSets) { logger.warn( "The number of synonym rules in the synonym set [{}] exceeds the maximum allowed." 
@@ -265,7 +265,7 @@ public void getSynonymSetRules(String synonymSetId, int from, int size, ActionLi .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(new DelegatingIndexNotFoundActionListener<>(synonymSetId, listener, (searchListener, searchResponse) -> { - final long totalSynonymRules = searchResponse.getHits().getTotalHits().value; + final long totalSynonymRules = searchResponse.getHits().getTotalHits().value(); // If there are no rules, check that the synonym set actually exists to return the proper error if (totalSynonymRules == 0) { checkSynonymSetExists(synonymSetId, searchListener.delegateFailure((existsListener, response) -> { @@ -383,7 +383,7 @@ public void putSynonymRule(String synonymsSetId, SynonymRule synonymRule, Action .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(l1.delegateFailureAndWrap((searchListener, searchResponse) -> { - long synonymsSetSize = searchResponse.getHits().getTotalHits().value; + long synonymsSetSize = searchResponse.getHits().getTotalHits().value(); if (synonymsSetSize >= maxSynonymsSets) { listener.onFailure( new IllegalArgumentException("The number of synonym rules in a synonyms set cannot exceed " + maxSynonymsSets) diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 4e85ba2cf479f..33c8081971202 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,2 +1,3 @@ org.elasticsearch.index.codec.Elasticsearch814Codec org.elasticsearch.index.codec.Elasticsearch816Codec +org.elasticsearch.index.codec.Elasticsearch900Codec diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 
65464c7f14a5c..bf4a28b9c60b2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,8 +12,8 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -53,6 +53,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; @@ -327,7 +328,7 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene912Codec(Lucene912Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene100Codec(Lucene100Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { @@ -414,25 +415,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene912Codec.Mode mode() { - return Lucene912Codec.Mode.BEST_SPEED; + Lucene100Codec.Mode mode() { + return Lucene100Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene912Codec.Mode mode() { 
- return Lucene912Codec.Mode.BEST_COMPRESSION; + Lucene100Codec.Mode mode() { + return Lucene100Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene912Codec.Mode mode(); + abstract Lucene100Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(codecMode.mode())); + .setCodec(new Lucene100Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -640,7 +641,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(mode.mode()) { + .setCodec(new Lucene100Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); @@ -687,7 +688,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats final String[] files; final Directory directory; if (sis.getUseCompoundFile()) { - directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.READ); + directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.DEFAULT); files = directory.listAll(); } else { directory = reader.directory(); @@ -785,14 +786,15 @@ private static class RandomMatchQuery extends Query { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext 
context) throws IOException { final FixedBitSet bits = new FixedBitSet(context.reader().maxDoc()); for (int i = 0; i < bits.length(); i++) { if (randomBoolean()) { bits.set(i); } } - return new ConstantScoreScorer(this, 1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + Scorer scorer = new ConstantScoreScorer(1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java index b7919878f9081..681d9d000beef 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java @@ -79,8 +79,8 @@ public void testNullShardResultHandling() throws Exception { queryPhaseResultConsumer.consumeResult(querySearchResult, nextCounter::incrementAndGet); } var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertFalse(reducePhase.isEmptyResult()); assertEquals(10, nextCounter.get()); } @@ -94,8 +94,8 @@ public void testEmptyResults() throws Exception { ) ) { var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertTrue(reducePhase.isEmptyResult()); } } diff --git 
a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 90174a89209b8..99401e8a8d40a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -151,11 +151,11 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNotNull(responseRef.get().get(1)); assertNull(responseRef.get().get(1).fetchResult()); - assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); @@ -236,7 +236,7 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNull(responseRef.get().get(1)); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java 
b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 31ef57482cab1..09dd7821cd123 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -116,7 +116,7 @@ public void testShortcutQueryAndFetchOptimization() throws Exception { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); if (numHits != 0) { assertEquals(42, searchResponse.getHits().getAt(0).docId()); } @@ -244,7 +244,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(42, searchResponse.getHits().getAt(1).docId()); assertEquals(0, searchResponse.getFailedShards()); @@ -353,7 +353,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(1, searchResponse.getFailedShards()); assertEquals(1, searchResponse.getSuccessfulShards()); @@ -468,7 +468,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, 
searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); assertEquals(Math.min(numHits, resultSetSize), searchResponse.getHits().getHits().length); SearchHit[] hits = searchResponse.getHits().getHits(); for (int i = 0; i < hits.length; i++) { @@ -703,7 +703,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(1, searchResponse.getHits().getHits().length); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(0, searchResponse.getFailedShards()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 857402d1baaac..9a507977c0123 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -297,8 +297,8 @@ public void testMerge() { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } for (SearchHit hit : mergedResponse.hits().getHits()) { SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); @@ -415,8 +415,8 @@ protected boolean lessThan(RankDoc a, RankDoc b) { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { 
assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } int rank = 1; for (SearchHit hit : mergedResponse.hits().getHits()) { @@ -522,8 +522,8 @@ private static int getTotalQueryHits(AtomicArray results) { int resultCount = 0; for (SearchPhaseResult shardResult : results.asList()) { TopDocs topDocs = shardResult.queryResult().topDocs().topDocs; - assert topDocs.totalHits.relation == Relation.EQUAL_TO; - resultCount += (int) topDocs.totalHits.value; + assert topDocs.totalHits.relation() == Relation.EQUAL_TO; + resultCount += (int) topDocs.totalHits.value(); } return resultCount; } @@ -784,7 +784,7 @@ public void testConsumerConcurrently() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -844,7 +844,7 @@ public void testConsumerOnlyAggs() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(0, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -902,7 +902,7 @@ public void 
testConsumerOnlyHits() throws Exception { assertAggReduction(request); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -969,7 +969,7 @@ public void testReduceTopNWithFromOffset() throws Exception { ScoreDoc[] scoreDocs = reduce.sortedTopDocs().scoreDocs(); assertEquals(5, scoreDocs.length); assertEquals(100.f, reduce.maxScore(), 0.0f); - assertEquals(12, reduce.totalHits().value); + assertEquals(12, reduce.totalHits().value()); assertEquals(95.0f, scoreDocs[0].score, 0.0f); assertEquals(94.0f, scoreDocs[1].score, 0.0f); assertEquals(93.0f, scoreDocs[2].score, 0.0f); @@ -1022,7 +1022,7 @@ public void testConsumerSortByField() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertTrue(reduce.sortedTopDocs().isSortedByField()); assertEquals(1, reduce.sortedTopDocs().sortFields().length); @@ -1079,7 +1079,7 @@ public void testConsumerFieldCollapsing() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(3, reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(a, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); 
assertEquals(b, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[1]).fields[0]); assertEquals(c, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[2]).fields[0]); @@ -1199,7 +1199,7 @@ public void testConsumerSuggestions() throws Exception { assertEquals(maxScoreCompletion, reduce.sortedTopDocs().scoreDocs()[0].score, 0f); assertEquals(0, reduce.sortedTopDocs().scoreDocs()[0].doc); assertNotEquals(-1, reduce.sortedTopDocs().scoreDocs()[0].shardIndex); - assertEquals(0, reduce.totalHits().value); + assertEquals(0, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -1290,7 +1290,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index d279fa5030a8c..e4284937474c7 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -230,11 +230,11 @@ public void run() { SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); if (withScroll) { - assertThat(phase.totalHits().value, equalTo((long) numShards)); - 
assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo((long) numShards)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } else { - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); } assertThat(phase.sortedTopDocs().scoreDocs().length, equalTo(1)); assertThat(phase.sortedTopDocs().scoreDocs()[0], instanceOf(FieldDoc.class)); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 2b0ed0552e594..51796f404c283 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -762,11 +762,11 @@ public void testMergeSearchHits() throws InterruptedException { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } - final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value; + final int numDocs = totalHits == null || totalHits.value() >= requestedSize ? 
requestedSize : (int) totalHits.value(); int scoreFactor = randomIntBetween(1, numResponses); float maxScore = scoreSort ? numDocs * scoreFactor : Float.NaN; SearchHit[] hits = randomSearchHitArray( @@ -862,8 +862,8 @@ public void testMergeSearchHits() throws InterruptedException { assertNull(searchHits.getTotalHits()); } else { assertNotNull(searchHits.getTotalHits()); - assertEquals(expectedTotalHits.value, searchHits.getTotalHits().value); - assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation); + assertEquals(expectedTotalHits.value(), searchHits.getTotalHits().value()); + assertSame(expectedTotalHits.relation(), searchHits.getTotalHits().relation()); } if (expectedMaxScore == Float.NEGATIVE_INFINITY) { assertTrue(Float.isNaN(searchHits.getMaxScore())); @@ -910,9 +910,9 @@ public void testMergeNoResponsesAdded() { assertEquals(0, response.getNumReducePhases()); assertFalse(response.isTimedOut()); assertNotNull(response.getHits().getTotalHits()); - assertEquals(0, response.getHits().getTotalHits().value); + assertEquals(0, response.getHits().getTotalHits().value()); assertEquals(0, response.getHits().getHits().length); - assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation); + assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation()); assertNull(response.getScrollId()); assertSame(InternalAggregations.EMPTY, response.getAggregations()); assertNull(response.getSuggest()); @@ -1004,7 +1004,7 @@ public void testMergeEmptySearchHitsWithNonEmpty() { assertEquals(2, merger.numResponses()); SearchResponse mergedResponse = merger.getMergedResponse(clusters); try { - assertEquals(10, mergedResponse.getHits().getTotalHits().value); + assertEquals(10, mergedResponse.getHits().getTotalHits().value()); assertEquals(10, mergedResponse.getHits().getHits().length); assertEquals(2, mergedResponse.getTotalShards()); assertEquals(2, mergedResponse.getSuccessfulShards()); @@ -1032,8 +1032,8 @@ public 
void testMergeOnlyEmptyHits() { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } SearchResponse searchResponse = new SearchResponse( SearchHits.empty(totalHits, Float.NaN), @@ -1232,7 +1232,7 @@ public void testPartialAggsMixedWithFullResponses() { SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1 + assertThat(hits.getTotalHits().value(), equalTo(2L)); // should be 2 hits from remote1 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ { @@ -1273,7 +1273,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1414,7 +1414,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 
from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1483,7 +1483,7 @@ public void testPartialAggsMixedWithFullResponses() { private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Index[] indices) { TotalHits totalHits = new TotalHits(2, TotalHits.Relation.EQUAL_TO); - final int numDocs = (int) totalHits.value; + final int numDocs = (int) totalHits.value(); int scoreFactor = 1; float maxScore = numDocs; int numFields = 1; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index afc4c6e9eccbf..bbeae6b19b8ac 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -621,8 +621,8 @@ public void testSerialization() throws IOException { if (searchResponse.getHits().getTotalHits() == null) { assertNull(deserialized.getHits().getTotalHits()); } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value()); + assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation()); } assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index c00fece686524..1b86e5b00000c 100644 --- 
a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -400,6 +400,6 @@ protected Fields getTermVectorsFromLucene(DirectoryReader directoryReader, TestD ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(1, scoreDocs.length); - return directoryReader.getTermVectors(scoreDocs[0].doc); + return directoryReader.termVectors().get(scoreDocs[0].doc); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index b8804e9160a75..05382de49087d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -9,6 +9,8 @@ package org.elasticsearch.cluster.metadata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.TransportVersion; @@ -602,6 +604,8 @@ public void testCalculateNumRoutingShards() { public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); + Automaton patternAutomaton = new RegExp("\\.test-~(one.*)", RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); + systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-~(one*)", "test")); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".pattern-test*", "test-1")); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 8158917f08187..9300aa992b687 100644 
--- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexOrDocValuesQuery; @@ -44,7 +45,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.MockDirectoryWrapper; @@ -172,10 +172,10 @@ public void testPruneUnreferencedFiles() throws IOException { assertEquals(3, open.maxDoc()); IndexSearcher s = newSearcher(open); - assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value, 0); + assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value(), 0); for (String file : dir.listAll()) { assertFalse("unexpected file: " + file, file.equals("segments_3") || file.startsWith("_2")); @@ -403,11 +403,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public 
Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { @@ -464,18 +459,6 @@ public void testAsSequentialBitsUsesRandomAccess() throws IOException { } } - /** - * Test that the "unmap hack" is detected as supported by lucene. - * This works around the following bug: https://bugs.openjdk.java.net/browse/JDK-4724038 - *

    - * While not guaranteed, current status is "Critical Internal API": http://openjdk.java.net/jeps/260 - * Additionally this checks we did not screw up the security logic around the hack. - */ - public void testMMapHackSupported() throws Exception { - // add assume's here if needed for certain platforms, but we should know if it does not work. - assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED); - } - public void testWrapAllDocsLive() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) @@ -508,8 +491,9 @@ public void testWrapAllDocsLive() throws Exception { IndexSearcher searcher = newSearcher(reader); Set actualDocs = new HashSet<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } @@ -554,8 +538,9 @@ public void testWrapLiveDocsNotExposeAbortedDocuments() throws Exception { IndexSearcher searcher = newSearcher(reader); List actualDocs = new ArrayList<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 8332ff87a9d57..918dcc1bcbd42 100644 --- 
a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -127,8 +128,9 @@ public void setUp() throws Exception { // now go over each doc, build the relevant references and filter reader = DirectoryReader.open(iw); List filterTerms = new ArrayList<>(); + StoredFields storedFields = reader.storedFields(); for (int docId = 0; docId < reader.maxDoc(); docId++) { - Document doc = reader.document(docId); + Document doc = storedFields.document(docId); addFreqs(doc, referenceAll); if (deletedIds.contains(doc.getField("id").stringValue()) == false) { addFreqs(doc, referenceNotDeleted); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java index 7a8d43ebbfd18..55ca666d8588b 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -11,15 +11,11 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import 
org.apache.lucene.tests.search.AssertingScorer; import org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.ESTestCase; @@ -66,27 +62,8 @@ public int advance(int target) throws IOException { }; } - private static Weight fakeWeight() { - return new Weight(new MatchAllDocsQuery()) { - @Override - public Explanation explain(LeafReaderContext context, int doc) throws IOException { - return null; - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return null; - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - private static Scorer hideTwoPhaseIterator(Scorer in) { - return new Scorer(in.getWeight()) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return TwoPhaseIterator.asDocIdSetIterator(in.twoPhaseIterator()); @@ -111,7 +88,7 @@ public int docID() { private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs); - final Scorer scorer = new Scorer(fakeWeight()) { + final Scorer scorer = new Scorer() { int lastScoredDoc = -1; final float matchCost = (random().nextBoolean() ? 
1000 : 0) + random().nextInt(2000); @@ -192,7 +169,7 @@ public void doTestRandom(boolean twoPhase) throws IOException { } Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); final float minScore = random().nextFloat(); - Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore); + Scorer minScoreScorer = new MinScoreScorer(scorer, minScore); int doc = -1; while (doc != DocIdSetIterator.NO_MORE_DOCS) { final int target; @@ -250,7 +227,7 @@ public void testConjunction() throws Exception { final float minScore; if (randomBoolean()) { minScore = randomFloat(); - MinScoreScorer minScoreScorer = new MinScoreScorer(scorer.getWeight(), scorer, minScore); + MinScoreScorer minScoreScorer = new MinScoreScorer(scorer, minScore); scorers.add(minScoreScorer); } else { scorers.add(scorer); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java index b1df24f4db2ad..3894efd0b7d4c 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java @@ -117,7 +117,7 @@ public boolean incrementToken() throws IOException { final double boost10 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|10 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -125,7 +125,7 @@ public boolean incrementToken() throws IOException { final double boost1 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|1 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) 
x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -178,7 +178,7 @@ public void testTopN() throws Exception { expectedTerms[idx++] = new Term("text", text); } for (BooleanClause clause : clauses) { - Term term = ((TermQuery) clause.getQuery()).getTerm(); + Term term = ((TermQuery) clause.query()).getTerm(); assertTrue(Arrays.asList(expectedTerms).contains(term)); } diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 6e8eb47035d43..e0e05c84b5649 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -84,12 +84,12 @@ public void testSimpleNumericOps() throws Exception { try (IndexReader reader = DirectoryReader.open(indexWriter)) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - Document doc = searcher.doc(topDocs.scoreDocs[0].doc); + Document doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); IndexableField f = doc.getField("test"); assertThat(f.numericValue(), equalTo(2)); topDocs = searcher.search(IntPoint.newExactQuery("test", 2), 1); - doc = searcher.doc(topDocs.scoreDocs[0].doc); + doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); f = doc.getField("test"); assertThat(f.stringValue(), equalTo("2")); } @@ -115,7 +115,7 @@ public void testOrdering() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); final ArrayList fieldsOrder = new ArrayList<>(); - searcher.doc(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { + searcher.storedFields().document(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { @Override public Status needsField(FieldInfo fieldInfo) throws IOException { fieldsOrder.add(fieldInfo.name); 
diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 97a6faaa5c6f6..01c4ac3c6fd6a 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -54,7 +54,7 @@ public void testVectorHighlighter() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -87,7 +87,7 @@ public void testVectorHighlighterPrefixQuery() throws Exception { IndexReader reader = searcher.getIndexReader(); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); @@ -143,7 +143,7 @@ public void testVectorHighlighterNoStore() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -170,7 +170,7 @@ public void testVectorHighlighterNoTermVector() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = 
highlighter.getBestFragment( diff --git a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java index 022a6994496ae..9419c63f9c48a 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java @@ -227,7 +227,7 @@ public static void corruptFile(Path fileToCorrupt, Logger logger) throws IOExcep } long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - 8); // one long is the checksum... 8 bytes checksumAfterCorruption = input.getChecksum(); diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index db656b1fc5a94..450d123f551c8 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -1776,9 +1777,10 @@ private static void forEachDocument(DirectoryReader reader, Set types, C final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); document.add(new StringField(TYPE_FIELD_NAME, typeName, Field.Store.NO)); consumer.accept(document); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 7b264ac93511b..532a2ff024e8f 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -290,7 +290,7 @@ public void testRefreshActuallyWorks() throws Exception { // we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, search.totalHits.value); + assertEquals(1, search.totalHits.value()); } }); assertFalse(refreshTask.isClosed()); @@ -304,7 +304,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the force refresh we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(2, search.totalHits.value); + assertEquals(2, search.totalHits.value()); } }); prepareIndex("test").setId("2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); @@ -312,7 +312,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the scheduled refresh try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs 
search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(3, search.totalHits.value); + assertEquals(3, search.totalHits.value()); } }); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 10b0b54d2d7e2..9e4a19eb039fd 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -52,7 +52,7 @@ public void testResolveDefaultCodecs() throws Exception { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch816Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java index d43a1e09d71a3..12a17f5c263a8 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java @@ -115,7 +115,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { assertEquals(0, field.nextOrd()); BytesRef scratch = field.lookupOrd(0); assertEquals("value", scratch.utf8ToString()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, field.nextOrd()); } assertEquals(DocIdSetIterator.NO_MORE_DOCS, field.nextDoc()); for (int i = 0; i < NUM_DOCS; i++) { @@ -126,7 +125,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { BytesRef scratch = fieldN.lookupOrd(0); 
assertEquals("value" + i, scratch.utf8ToString()); assertEquals(DocIdSetIterator.NO_MORE_DOCS, fieldN.nextDoc()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, fieldN.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java new file mode 100644 index 0000000000000..099b59808ef4a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.index.codec.tsdb; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; + +/** Tests ES87TSDBDocValuesFormat with custom skipper interval size. 
*/ +public class ES87TSDBDocValuesFormatVariableSkipIntervalTests extends BaseDocValuesFormatTestCase { + + @Override + protected Codec getCodec() { + // small interval size to test with many intervals + return TestUtil.alwaysDocValuesFormat(new ES87TSDBDocValuesFormat(random().nextInt(4, 16))); + } + + public void testSkipIndexIntervalSize() { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new ES87TSDBDocValuesFormat(random().nextInt(Integer.MIN_VALUE, 2)) + ); + assertTrue(ex.getMessage().contains("skipIndexIntervalSize must be > 1")); + } + + public void testSkipperAllEqualValue() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int numDocs = atLeast(100); + for (int i = 0; i < numDocs; i++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + skipper.advance(0); + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs, skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on different value + public void testSkipperFewValuesSorted() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + boolean reverse = random().nextBoolean(); + config.setIndexSort(new Sort(new SortField("dv", SortField.Type.LONG, reverse))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, 
config)) { + final int intervals = random().nextInt(2, 10); + final int[] numDocs = new int[intervals]; + for (int i = 0; i < intervals; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", i)); + writer.addDocument(doc); + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + if (reverse) { + for (int i = intervals - 1; i >= 0; i--) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } else { + for (int i = 0; i < intervals; i++) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on empty doc values + public void testSkipperAllEqualValueWithGaps() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + 
doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + // add doc with empty "dv" + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on multi-values + public void testSkipperAllEqualValueWithMultiValues() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + int docs = random().nextInt(10) + 16; + numDocs[i] += docs; + for (int j = 0; j < docs; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + if (i != gaps - 1) { + // add doc with mutivalues + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + 
numDocs[i + 1] = 1; + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java index 8f0a306e1eb3b..86b60d9984de5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; @@ -110,8 +111,9 @@ public void testRandom() throws Exception { totalSize += vectorValues.size(); StoredFields storedFields = ctx.reader().storedFields(); int docId; - while ((docId = vectorValues.nextDoc()) != NO_MORE_DOCS) { - byte[] v = vectorValues.vectorValue(); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + while ((docId = iterator.nextDoc()) != NO_MORE_DOCS) { + byte[] v = vectorValues.vectorValue(iterator.index()); assertEquals(dimension, v.length); String idString = 
storedFields.document(docId).getField("id").stringValue(); int id = Integer.parseInt(idString); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index aa50bc26c4443..57cca6eea86ec 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 8cb927036588a..9069b094ee483 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 
+24,7 @@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index cee60efb57327..549a14ca6c31b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -19,6 +19,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.VectorSimilarityFunction; @@ -41,7 +42,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); @@ -68,9 +69,10 @@ public void testAddIndexesDirectory0FS() throws Exception { try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader 
r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); - assertEquals(0, vectorValues.vectorValue()[0], 0); - assertEquals(NO_MORE_DOCS, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertEquals(0, vectorValues.vectorValue(iterator.index())[0], 0); + assertEquals(NO_MORE_DOCS, iterator.nextDoc()); } } } @@ -110,12 +112,13 @@ private void testAddIndexesDirectory01FS(VectorSimilarityFunction similarityFunc try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); // The merge order is randomized, we might get 1 first, or 2 - float value = vectorValues.vectorValue()[0]; + float value = vectorValues.vectorValue(iterator.index())[0]; assertTrue(value == 1 || value == 2); - assertEquals(1, vectorValues.nextDoc()); - value += vectorValues.vectorValue()[0]; + assertEquals(1, iterator.nextDoc()); + value += vectorValues.vectorValue(iterator.index())[0]; assertEquals(3f, value, 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index 90d2584feb3f2..034d428b25209 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; 
import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index add90ea271fa1..4af6a405c7705 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java index 04d4ef2079b99..cef5e5358f3d5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java @@ -20,6 +20,7 @@ package 
org.elasticsearch.index.codec.vectors; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.VectorScorer; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.logging.LogConfigurator; @@ -61,7 +62,7 @@ public void testScore() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) throws IOException { return random().nextFloat(0f, 1000f); @@ -99,7 +100,7 @@ public float[] getCentroid() throws IOException { } @Override - public RandomAccessBinarizedByteVectorValues copy() throws IOException { + public BinarizedByteVectorValues copy() throws IOException { return null; } @@ -115,6 +116,16 @@ public int size() { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -209,7 +220,7 @@ public void testScoreEuclidean() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, 0f, 0f) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 355.78073f; @@ -375,7 +386,7 @@ public float[] getCentroid() { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -389,6 +400,16 @@ public int size() { return 1; } + @Override + public VectorScorer 
scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -806,7 +827,7 @@ public void testScoreMIP() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 0f; @@ -1617,7 +1638,7 @@ public float[] getCentroid() { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -1727,6 +1748,16 @@ public int size() { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java index 0892436891ff1..42f2fbb383ac9 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; 
import org.apache.lucene.index.DirectoryReader; @@ -30,6 +30,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.IndexSearcher; @@ -58,7 +59,7 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816BinaryQuantizedVectorsFormat(); @@ -90,8 +91,8 @@ public void testSearch() throws Exception { float[] queryVector = randomVector(dims); Query q = new KnnFloatVectorQuery(fieldName, queryVector, k); TopDocs collectedDocs = searcher.search(q, k); - assertEquals(k, collectedDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation); + assertEquals(k, collectedDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation()); } } } @@ -148,7 +149,7 @@ public void testQuantizedVectorsWriteAndRead() throws IOException { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); assertEquals(vectorValues.size(), numVectors); - OffHeapBinarizedVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) + BinarizedByteVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) .getQuantizedVectorValues(); float[] centroid = qvectorValues.getCentroid(); assertEquals(centroid.length, dims); @@ -159,13 +160,18 @@ public void testQuantizedVectorsWriteAndRead() throws IOException { if (similarityFunction == VectorSimilarityFunction.COSINE) { vectorValues = new 
ES816BinaryQuantizedVectorsWriter.NormalizedFloatVectorValues(vectorValues); } - - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - float[] corrections = quantizer.quantizeForIndex(vectorValues.vectorValue(), expectedVector, centroid); - assertArrayEquals(expectedVector, qvectorValues.vectorValue()); - assertEquals(corrections.length, qvectorValues.getCorrectiveTerms().length); + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); + + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + float[] corrections = quantizer.quantizeForIndex( + vectorValues.vectorValue(docIndexIterator.index()), + expectedVector, + centroid + ); + assertArrayEquals(expectedVector, qvectorValues.vectorValue(docIndexIterator.index())); + assertEquals(corrections.length, qvectorValues.getCorrectiveTerms(docIndexIterator.index()).length); for (int i = 0; i < corrections.length; i++) { - assertEquals(corrections[i], qvectorValues.getCorrectiveTerms()[i], 0.00001f); + assertEquals(corrections[i], qvectorValues.getCorrectiveTerms(docIndexIterator.index())[i], 0.00001f); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java index f607de57e1fd5..ca96e093b7b28 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import 
org.apache.lucene.document.KnnFloatVectorField; @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.TopDocs; @@ -55,7 +56,7 @@ public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816HnswBinaryQuantizedVectorsFormat(); @@ -91,12 +92,13 @@ public void testSingleVectorCase() throws Exception { try (IndexReader reader = DirectoryReader.open(w)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues("f"); + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); assert (vectorValues.size() == 1); - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - assertArrayEquals(vector, vectorValues.vectorValue(), 0.00001f); + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + assertArrayEquals(vector, vectorValues.vectorValue(docIndexIterator.index()), 0.00001f); } TopDocs td = r.searchNearestVectors("f", randomVector(vector.length), 1, null, Integer.MAX_VALUE); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); assertTrue(td.scoreDocs[0].score >= 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index c3fea6c7a189b..437ba1cecc11d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -10,7 +10,7 @@ 
package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -35,13 +35,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 71c7464657e72..77a7585e3b518 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import 
org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 02a1b10697907..3d6cfea70d121 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 6565a11a860ec..6d205a22433b4 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -9,7 
+9,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -45,7 +45,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); final PostingsFormat postingsFormat = new Completion912PostingsFormat(); - indexWriterConfig.setCodec(new Lucene912Codec() { + indexWriterConfig.setCodec(new Lucene100Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index c8ca3d17de797..21aefd893de70 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -266,7 +266,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("test", luceneDoc.get("value")); } @@ -279,7 +279,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { assertEquals(1, 
searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("updated", luceneDoc.get("value")); } @@ -640,7 +640,7 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { recoverFromTranslog(recoveringEngine, translogHandler, Long.MAX_VALUE); recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 0 : 1)); } } @@ -747,7 +747,7 @@ public void testTranslogRecoveryWithMultipleGenerations() throws IOException { recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), docs); - assertEquals(docs, topDocs.totalHits.value); + assertEquals(docs, topDocs.totalHits.value()); } } finally { IOUtils.close(initialEngine, recoveringEngine, store); @@ -2010,7 +2010,7 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValueDoc1)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2019,7 +2019,7 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = 
searcher.search( new TermQuery(new Term("value", lastFieldValueDoc2)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2249,7 +2249,7 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValue)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2275,7 +2275,10 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion assertVisibleCount(engine, docDeleted ? 0 : 1); if (docDeleted == false) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2361,7 +2364,10 @@ public void testNonInternalVersioningOnPrimary() throws IOException { if (docDeleted == false) { logger.info("searching for [{}]", lastFieldValue); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2378,7 +2384,7 @@ public void testVersioningPromotedReplica() throws IOException { final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); final long currentSeqNo = getSequenceID(replicaEngine, new Engine.Get(false, false, 
Term.toString(lastReplicaOp.uid()))).v1(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); if (totalHits > 0) { // last op wasn't delete assertThat(currentSeqNo, equalTo(finalReplicaSeqNo + opsOnPrimary)); @@ -2402,7 +2408,10 @@ public void testConcurrentExternalVersioningOnPrimary() throws IOException, Inte assertVisibleCount(engine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2434,7 +2443,7 @@ class OpAndVersion { Engine.Get engineGet = new Engine.Get(true, false, doc.id()); try (Engine.GetResult get = engine.get(engineGet, mappingLookup, documentParser, randomSearcherWrapper())) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); String removed = op % 3 == 0 && values.size() > 0 ? 
values.remove(0) : null; String added = "v_" + idGenerator.incrementAndGet(); @@ -2480,7 +2489,7 @@ class OpAndVersion { Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id()), mappingLookup, documentParser, randomSearcherWrapper()) ) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); assertThat(currentValues, equalTo(new HashSet<>(values))); } @@ -3424,7 +3433,7 @@ public void testSkipTranslogReplay() throws IOException { engine.skipTranslogRecovery(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); - assertThat(topDocs.totalHits.value, equalTo(0L)); + assertThat(topDocs.totalHits.value(), equalTo(0L)); } } } @@ -3514,7 +3523,7 @@ public void testTranslogReplay() throws IOException { assertThat(result.getVersion(), equalTo(2L)); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } engine.close(); @@ -3523,7 +3532,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("warm_up"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } assertEquals(flush ? 
1 : 2, translogHandler.appliedOperations()); engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get())); @@ -3534,7 +3543,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs); - assertThat(topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), equalTo((long) numDocs)); } } @@ -3890,7 +3899,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } operation = appendOnlyPrimary(doc, false, 1, create); retry = appendOnlyPrimary(doc, true, 1, create); @@ -3925,7 +3934,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -3983,7 +3992,7 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } } @@ -4007,7 +4016,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } final boolean create = 
randomBoolean(); @@ -4047,7 +4056,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4092,12 +4101,12 @@ public void testDoubleDeliveryReplica() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { List ops = readAllOperationsInLucene(engine); @@ -4172,7 +4181,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } index = new Engine.Index( @@ -4194,7 +4203,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4264,7 +4273,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs 
= searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } Engine.Index secondIndexRequestReplica = new Engine.Index( @@ -4285,7 +4294,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -5678,7 +5687,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs search = searcher.search(new MatchAllDocsQuery(), searcher.getIndexReader().numDocs()); for (int i = 0; i < search.scoreDocs.length; i++) { - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[i].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[i].doc); assertEquals("updated", luceneDoc.get("value")); } int totalNumDocs = numDocs - numDeletes.get(); @@ -6666,7 +6675,7 @@ public void testStoreHonorsLuceneVersion() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { LeafReader leafReader = getOnlyLeafReader(searcher.getIndexReader()); - assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().getCreatedVersionMajor()); + assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().createdVersionMajor()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index a3a21fc32e546..b6be13b9f2513 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.Constants; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; @@ -71,22 +70,16 @@ public void testRamBytesUsed() throws Exception { } actualRamBytesUsed = RamUsageTester.ramUsed(map); estimatedRamBytesUsed = map.ramBytesUsed(); - long tolerance; - if (Constants.JRE_IS_MINIMUM_JAVA9) { - // With Java 9, RamUsageTester computes the memory usage of maps as - // the memory usage of an array that would contain exactly all keys - // and values. This is an under-estimation of the actual memory - // usage since it ignores the impact of the load factor and of the - // linked list/tree that is used to resolve collisions. So we use a - // bigger tolerance. - // less than 50% off - tolerance = actualRamBytesUsed / 2; - } else { - // Java 8 is more accurate by doing reflection into the actual JDK classes - // so we give it a lower error bound. - // less than 25% off - tolerance = actualRamBytesUsed / 4; - } + + // Since Java 9, RamUsageTester computes the memory usage of maps as + // the memory usage of an array that would contain exactly all keys + // and values. This is an under-estimation of the actual memory + // usage since it ignores the impact of the load factor and of the + // linked list/tree that is used to resolve collisions. So we use a + // bigger tolerance. 
+ // less than 50% off + long tolerance = actualRamBytesUsed / 2; + assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java index e7e668415cdd4..c0e365909429a 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; @@ -69,8 +70,9 @@ public void testPruneAll() throws IOException { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); if (pruneIdField) { assertEquals(1, document.getFields().size()); assertEquals("source", document.getFields().get(0).name()); @@ -151,8 +153,9 @@ public void testPruneSome() throws IOException { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); 
assertTrue(collect.contains("even")); @@ -192,8 +195,9 @@ public void testPruneNone() throws IOException { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("extra_source")); diff --git a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java index a6e56c4137028..49036324e722e 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java @@ -80,7 +80,7 @@ static Segment randomSegment() { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_8_0_0; + segment.version = Version.LUCENE_9_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.segmentSort = randomIndexSort(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index b7793a644f8b8..9d0a9cdeb1968 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -113,7 +113,7 @@ public void testSingleValueAllSet() throws Exception { TopFieldDocs topDocs; SortField sortField = 
indexFieldData.sortField(null, MultiValueMode.MIN, null, false); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[0]).fields[0]), equalTo(one())); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -123,7 +123,7 @@ public void testSingleValueAllSet() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(1)); @@ -193,14 +193,14 @@ public void testMultiValueAllSet() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(2)); sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); @@ -258,7 +258,7 @@ public void 
testSortMultiValuesFields() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(7)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("!08")); @@ -279,7 +279,7 @@ public void testSortMultiValuesFields() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(6)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("10")); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index ac77f147a7ce6..48d6cabefe345 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -262,10 +262,10 @@ public void testActualMissingValue(boolean reverse) throws IOException { randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField) ); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); BytesRef previousValue = reverse ? 
UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); final BytesRef value = new BytesRef(docValue == null ? missingValue : docValue); if (reverse) { assertTrue(previousValue.compareTo(value) >= 0); @@ -321,10 +321,10 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { new Sort(sortField) ); - assertThat(topDocs.totalHits.value, lessThanOrEqualTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), lessThanOrEqualTo((long) numDocs)); BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); if (first && docValue == null) { assertNull(previousValue); } else if (first == false && docValue != null) { @@ -414,7 +414,7 @@ public void testNestedSorting(MultiValueMode sortMode) throws IOException { assertTrue("expected " + docID + " to be a parent", parents.get(docID)); BytesRef cmpValue = null; for (int child = parents.prevSetBit(docID - 1) + 1; child < docID; ++child) { - String[] sVals = searcher.doc(child).getValues("text"); + String[] sVals = searcher.storedFields().document(child).getValues("text"); final BytesRef[] vals; if (sVals.length == 0) { vals = new BytesRef[0]; @@ -498,15 +498,11 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(1)); assertTrue(values.advanceExact(2)); ord = values.nextOrd(); assertThat(ord, 
equalTo(4L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Second segment leaf = topLevelReader.leaves().get(1); @@ -522,8 +518,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertTrue(values.advanceExact(1)); ord = values.nextOrd(); assertThat(ord, equalTo(7L)); @@ -534,8 +528,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(2)); assertTrue(values.advanceExact(3)); ord = values.nextOrd(); @@ -547,8 +539,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(11L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Third segment leaf = topLevelReader.leaves().get(2); @@ -564,8 +554,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(2L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); } public void testTermsEnum() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java index cb6732ce8bb7d..aa23dc6da19df 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java @@ -108,7 +108,6 @@ public int compare(OrdAndId o1, OrdAndId o2) { for (Long ord : docOrds) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, docs.nextOrd()); } for (int i = docId + 1; i < ordAndId.id; i++) { assertFalse(singleOrds.advanceExact(i)); @@ -257,7 +256,6 @@ private void assertEquals(SortedSetDocValues docs, long[][] ordinalPlan) throws for (long ord : ords) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertThat(docs.nextOrd(), equalTo(SortedSetDocValues.NO_MORE_ORDS)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 86ddbbbc97599..81be71aec23c8 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -47,7 +47,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + r1 -> assertThat(r1.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(0, 1); @@ -57,7 +57,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + r2 -> assertThat(r2.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(1, 1); @@ -72,7 +72,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r3 -> 
assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + r3 -> assertThat(r3.getHits().getTotalHits().value(), equalTo(5L)) ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index ddba993fd41cc..4aa983a78b07b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -121,8 +122,15 @@ public void testSort() throws IOException { BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [true]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [false]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [true]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 2c78f5f7fee20..f55d213bea581 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -38,25 +38,25 @@ public void testDoubleIndexingSameDoc() throws Exception { }, reader -> { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); }); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index 48879cdd0d77e..140137015d98a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -120,9 +121,19 @@ public void testSort() throws IOException { DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1.1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2.1]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4.2]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4.2]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 4284bc00cfc15..0182da8ade48a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -78,5 +78,4 @@ public void testUsingEnabledSettingThrows() { ex.getMessage() ); } - } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java index 72055940b8970..4cc447d97291c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; +import java.util.List; + public class IdFieldTypeTests extends ESTestCase { public void testRangeQuery() { @@ -49,7 +51,7 @@ public void testTermsQuery() { Mockito.when(context.indexVersionCreated()).thenReturn(IndexVersion.current()); MappedFieldType ft = new ProvidedIdFieldMapper.IdFieldType(() -> false); Query query = ft.termQuery("id", context); - assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); + assertEquals(new TermInSetQuery("_id", List.of(Uid.encodeId("id"))), query); } public void testIsAggregatable() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java index 4170adf0a8508..8f209fb78fc64 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java @@ -230,13 +230,8 @@ public void testAutomatonFromIPv6Group() throws UnknownHostException { } private static CompiledAutomaton compileAutomaton(Automaton automaton) { - CompiledAutomaton compiledAutomaton 
= new CompiledAutomaton( - automaton, - null, - false, - Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, - true - ); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + CompiledAutomaton compiledAutomaton = new CompiledAutomaton(automaton, false, false, true); return compiledAutomaton; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index acbfe8b8f9b38..281d2993fa29c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -125,16 +126,17 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); + StoredFields storedFields = reader.storedFields(); assertThat( - reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.1\"]}") ); assertThat( - reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.2\"]}") ); assertThat( - reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + 
storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.4\"]}") ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index b4c7ea0ed9508..e3bdb3d45818f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -110,7 +110,7 @@ protected TokenStream normalize(String fieldName, TokenStream in) { public void testTermsQuery() { MappedFieldType ft = new KeywordFieldType("field"); - BytesRef[] terms = new BytesRef[] { new BytesRef("foo"), new BytesRef("bar") }; + List terms = List.of(new BytesRef("foo"), new BytesRef("bar")); assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT)); MappedFieldType ft2 = new KeywordFieldType("field", false, true, Map.of()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index f2e788918010c..57d52991a6442 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -115,9 +116,19 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, 
MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"a\"]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"b\"]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"d\"]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"a\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"b\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"d\"]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index 40357399cab5b..a8cb4d51c5efa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; @@ -132,9 +133,19 @@ public void testSort() throws IOException { LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new 
MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 5360215b5b05b..836b791af23c1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -80,7 +80,7 @@ public void testBytesAndNumericRepresentation() throws Exception { "field10" ); CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); - searcher.doc(0, fieldsVisitor); + searcher.storedFields().document(0, fieldsVisitor); fieldsVisitor.postProcess(mapperService::fieldType); assertThat(fieldsVisitor.fields().size(), equalTo(10)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index c8fcf486068c4..86914cfe9ced7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -879,7 +879,7 @@ protected TokenStreamComponents createComponents(String fieldName) { IndexSearcher searcher = newSearcher(ir); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "Prio 1"); TopDocs td = searcher.search(queryBuilder.toQuery(searchExecutionContext), 1); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); }); Exception e = expectThrows( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java index 5a34886d73db7..c8d7ad8127b55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java @@ -81,7 +81,7 @@ public void testTermQuery() { public void testTermsQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new TermInSetQuery(ft.name(), new BytesRef("key\0value1"), new BytesRef("key\0value2")); + Query expected = new TermInSetQuery(ft.name(), List.of(new BytesRef("key\0value1"), new BytesRef("key\0value2"))); List terms = new ArrayList<>(); terms.add("value1"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java index f494af259c504..b52192d6e47b4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java @@ -23,8 +23,6 @@ import java.io.IOException; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - public class KeyedFlattenedLeafFieldDataTests extends 
ESTestCase { private LeafOrdinalsFieldData delegate; @@ -121,7 +119,8 @@ public void testNextOrd() throws IOException { docValues.advanceExact(0); int retrievedOrds = 0; - for (long ord = docValues.nextOrd(); ord != NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); assertTrue(0 <= ord && ord < 10); retrievedOrds++; @@ -190,9 +189,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (index == documentOrds.length) { - return NO_MORE_ORDS; - } + assertTrue(index < documentOrds.length); return documentOrds[index++]; } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java index b2ffb779be00b..de4ab0bc5df30 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.test.ESTestCase; @@ -25,7 +26,7 @@ public void testEmptyVectors() throws IOException { wrap(new float[0][0]), wrapMagnitudes(new float[0]) ); - assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.nextDoc()); + assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.iterator().nextDoc()); } public void testRandomVectors() throws IOException { @@ -47,9 +48,10 @@ public void testRandomVectors() throws IOException { wrapMagnitudes(magnitudes) ); + KnnVectorValues.DocIndexIterator iterator = normalizedCosineFloatVectorValues.iterator(); for (int i = 0; i < numVectors; i++) { - assertEquals(i, normalizedCosineFloatVectorValues.advance(i)); - 
assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(), (float) 1e-6); + assertEquals(i, iterator.advance(i)); + assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(iterator.index()), (float) 1e-6); assertEquals(magnitudes[i], normalizedCosineFloatVectorValues.magnitude(), (float) 1e-6); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java index c007156c806eb..baade683a90fd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java @@ -208,7 +208,41 @@ public int size() { } @Override - public byte[] vectorValue() { + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; + } + + @Override + public byte[] vectorValue(int ord) { + assert ord == index; for (int i = 0; i < byteVector.length; i++) { byteVector[i] = (byte) vectors[index][i]; } @@ -216,25 +250,12 @@ public byte[] vectorValue() { } @Override - public int docID() { - return index; - } - - @Override - public int nextDoc() { + public ByteVectorValues copy() { throw new UnsupportedOperationException(); } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; - } - - @Override - public VectorScorer scorer(byte[] floats) throws IOException { + public VectorScorer scorer(byte[] floats) 
{ throw new UnsupportedOperationException(); } }; @@ -256,30 +277,51 @@ public int size() { } @Override - public float[] vectorValue() { - return vectors[index]; - } - - @Override - public int docID() { - return index; + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() throws IOException { + return advance(index + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; } @Override - public int nextDoc() { - return advance(index + 1); + public float[] vectorValue(int ord) { + assert ord == index; + return vectors[index]; } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; + public FloatVectorValues copy() { + throw new UnsupportedOperationException(); } @Override - public VectorScorer scorer(float[] floats) throws IOException { + public VectorScorer scorer(float[] floats) { throw new UnsupportedOperationException(); } }; diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 0fa8f70525e8a..e9ef3ac8ad748 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -207,15 +207,15 @@ public void testMinShouldMatchFilterWithoutShouldClauses() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(booleanQuery.clauses().size(), equalTo(1)); BooleanClause booleanClause = booleanQuery.clauses().get(0); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER)); - 
assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class)); - BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery(); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.FILTER)); + assertThat(booleanClause.query(), instanceOf(BooleanQuery.class)); + BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.query(); // we didn't set minimum should match initially, there are no should clauses so it should be 0 assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(innerBooleanQuery.clauses().size(), equalTo(1)); BooleanClause innerBooleanClause = innerBooleanQuery.clauses().get(0); - assertThat(innerBooleanClause.getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(innerBooleanClause.getQuery(), instanceOf(MatchAllDocsQuery.class)); + assertThat(innerBooleanClause.occur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(innerBooleanClause.query(), instanceOf(MatchAllDocsQuery.class)); } public void testMinShouldMatchBiggerThanNumberOfShouldClauses() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java index daaa0c4653d7a..f09e524faf8ff 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java @@ -143,8 +143,8 @@ public void testWildcardFieldPattern() throws Exception { BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(CombinedFieldQuery.class)); - assertThat(booleanQuery.clauses().get(1).getQuery(), instanceOf(CombinedFieldQuery.class)); + assertThat(booleanQuery.clauses().get(0).query(), instanceOf(CombinedFieldQuery.class)); + assertThat(booleanQuery.clauses().get(1).query(), 
instanceOf(CombinedFieldQuery.class)); }); } @@ -164,8 +164,8 @@ public void testOperator() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(minimumShouldMatch)); assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(occur)); - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(occur)); } public void testQueryBoost() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index 49c646d243a55..ef5088eef84a7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.document.LatLonPoint; -import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.LongField; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -81,7 +81,7 @@ protected void doAssertLuceneQuery(DistanceFeatureQueryBuilder queryBuilder, Que } else { // NANOSECONDS pivotLong = pivotVal.getNanos(); } - expectedQuery = LongPoint.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); + expectedQuery = LongField.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); } assertEquals(expectedQuery, query); } diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index ba3350bca8e2c..afa8fc1529604 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -68,7 +68,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, Collection childFields = context.getMatchingFieldNames(field + ".*"); assertThat(booleanQuery.clauses().size(), equalTo(childFields.size())); for (BooleanClause booleanClause : booleanQuery) { - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } else if (context.getFieldType(field).hasDocValues() || context.getFieldType(field).getTextSearchInfo().hasNorms()) { assertThat(constantScoreQuery.getQuery(), instanceOf(FieldExistsQuery.class)); @@ -87,7 +87,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, assertThat(booleanQuery.clauses().size(), equalTo(fields.size())); for (int i = 0; i < fields.size(); i++) { BooleanClause booleanClause = booleanQuery.clauses().get(i); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java index e471858ce9c5a..79f2dcb61e508 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -37,7 +37,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalToIgnoringCase; -import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.hasSize; public class MatchBoolPrefixQueryBuilderTests extends 
AbstractQueryTestCase { @@ -101,7 +100,7 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que // all queries except the last should be TermQuery or SynonymQuery final Set allQueriesExceptLast = IntStream.range(0, booleanQuery.clauses().size() - 1) .mapToObj(booleanQuery.clauses()::get) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .collect(Collectors.toSet()); assertThat( allQueriesExceptLast, @@ -122,13 +121,13 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que }); // the last query should be PrefixQuery - final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).getQuery(); + final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).query(); assertThat(shouldBePrefixQuery, instanceOf(PrefixQuery.class)); if (queryBuilder.minimumShouldMatch() != null) { final int optionalClauses = (int) booleanQuery.clauses() .stream() - .filter(clause -> clause.getOccur() == BooleanClause.Occur.SHOULD) + .filter(clause -> clause.occur() == BooleanClause.Occur.SHOULD) .count(); final int expected = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch()); assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(expected)); @@ -266,10 +265,12 @@ private static void assertBooleanQuery(Query actual, List expectedClauseQ assertThat(actual, instanceOf(BooleanQuery.class)); final BooleanQuery actualBooleanQuery = (BooleanQuery) actual; assertThat(actualBooleanQuery.clauses(), hasSize(expectedClauseQueries.size())); - assertThat(actualBooleanQuery.clauses(), everyItem(hasProperty("occur", equalTo(BooleanClause.Occur.SHOULD)))); for (int i = 0; i < actualBooleanQuery.clauses().size(); i++) { - final Query clauseQuery = actualBooleanQuery.clauses().get(i).getQuery(); + BooleanClause clause = actualBooleanQuery.clauses().get(i); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + final Query 
clauseQuery = clause.query(); + assertThat(clauseQuery, equalTo(expectedClauseQueries.get(i))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index e71485647913c..ba46bf76efbfe 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; @@ -163,7 +164,7 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, // calculate expected minimumShouldMatch value int optionalClauses = 0; for (BooleanClause c : bq.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } @@ -527,9 +528,9 @@ public void testAliasWithSynonyms() throws Exception { public void testMaxBooleanClause() { MatchQueryParser query = new MatchQueryParser(createSearchExecutionContext()); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraph(40))); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraphMultiTerms())); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); } private static class MockGraphAnalyzer extends Analyzer { @@ -567,7 +568,7 @@ 
private static CannedBinaryTokenStream.BinaryToken[] createGiantGraph(int numPos } /** - * Creates a graph token stream with {@link BooleanQuery#getMaxClauseCount()} + * Creates a graph token stream with {@link IndexSearcher#getMaxClauseCount()} * expansions at the last position. **/ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms() { @@ -578,7 +579,7 @@ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms( tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); - for (int i = 0; i < BooleanQuery.getMaxClauseCount(); i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount(); i++) { tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 1)); } return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 7209ee77cb70d..7c21751b4b332 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -246,7 +246,7 @@ private static Fields generateFields(String[] fieldNames, String text) throws IO for (String fieldName : fieldNames) { index.addField(fieldName, text, new WhitespaceAnalyzer()); } - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } @Override @@ -255,7 +255,7 @@ protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; for (BooleanClause booleanClause : booleanQuery) { - if (booleanClause.getQuery() instanceof 
MoreLikeThisQuery moreLikeThisQuery) { + if (booleanClause.query() instanceof MoreLikeThisQuery moreLikeThisQuery) { assertThat(moreLikeThisQuery.getLikeFields().length, greaterThan(0)); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index f4405229e857e..3dcf00e4f22f5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -749,7 +749,7 @@ public void testToQueryRegExpQueryTooComplex() throws Exception { TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10000 effort.")); } @@ -775,7 +775,7 @@ public void testToQueryRegExpQueryMaxDeterminizedStatesParsing() throws Exceptio TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10 effort.")); } @@ -925,10 +925,10 @@ public void testToQueryBooleanQueryMultipleBoosts() throws Exception { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(0).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "foo")))); - assertThat(booleanQuery.clauses().get(1).getOccur(), 
equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(1).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(0).query(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "foo")))); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(1).query(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); } public void testToQueryPhraseQueryBoostAndSlop() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 79c4c9ec5bb20..a84cd60a99e45 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -315,7 +315,7 @@ protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query private static int shouldClauses(BooleanQuery query) { int result = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { result++; } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index cb314472e35b3..283bbbc9b100d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -18,7 +18,6 @@ import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; -import 
org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; @@ -220,7 +219,7 @@ public void testToQueryInnerTermQuery() throws IOException { assertThat(prefixQuery.getPrefix().text(), equalTo("f")); assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); SpanBooleanQueryRewriteWithMaxClause rewrite = (SpanBooleanQueryRewriteWithMaxClause) wrapper.getRewriteMethod(); - assertThat(rewrite.getMaxExpansions(), equalTo(BooleanQuery.getMaxClauseCount())); + assertThat(rewrite.getMaxExpansions(), equalTo(IndexSearcher.getMaxClauseCount())); assertTrue(rewrite.isHardLimit()); } } @@ -265,8 +264,8 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { iw.addDocument(singleton(new TextField("body", "foo bar" + Integer.toString(i), Field.Store.NO))); } try (IndexReader reader = iw.getReader()) { - int origBoolMaxClauseCount = BooleanQuery.getMaxClauseCount(); - BooleanQuery.setMaxClauseCount(1); + int origBoolMaxClauseCount = IndexSearcher.getMaxClauseCount(); + IndexSearcher.setMaxClauseCount(1); try { QueryBuilder queryBuilder = new SpanMultiTermQueryBuilder(QueryBuilders.prefixQuery("body", "bar")); IndexSearcher searcher = newSearcher(reader); @@ -274,7 +273,7 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { RuntimeException exc = expectThrows(RuntimeException.class, () -> query.rewrite(searcher)); assertThat(exc.getMessage(), containsString("maxClauseCount")); } finally { - BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount); + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 3edf150688384..589019093075d 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -238,7 +238,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchField.totalHits.value, equalTo(3L)); + assertThat(topDocsWithMinimumShouldMatchField.totalHits.value(), equalTo(3L)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[1].doc, equalTo(3)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[2].doc, equalTo(4)); @@ -249,7 +249,7 @@ public void testDoToQuery() throws Exception { ).doToQuery(context); searcher = newSearcher(ir); TopDocs topDocsWithMinimumShouldMatch = searcher.search(queryWithMinimumShouldMatch, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocsWithMinimumShouldMatch.totalHits.value, equalTo(5L)); + assertThat(topDocsWithMinimumShouldMatch.totalHits.value(), equalTo(5L)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[1].doc, equalTo(2)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[2].doc, equalTo(3)); @@ -266,7 +266,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value, equalTo(1L)); + assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value(), equalTo(1L)); assertThat(topDocsWithMinimumShouldMatchNegative.scoreDocs[0].doc, equalTo(5)); } } @@ -310,7 +310,7 @@ public void testDoToQuery_msmScriptField() throws Exception { .doToQuery(context); IndexSearcher searcher = newSearcher(ir); TopDocs topDocs = searcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, 
equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); assertThat(topDocs.scoreDocs[2].doc, equalTo(4)); diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 47c75ee38ee1b..49b1362436ec7 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -305,7 +305,7 @@ public void testConflictingOpsOnReplica() throws Exception { for (IndexShard shard : shards) { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("f", "2")), 10); - assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value); + assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java index 5308f5f5d1f04..e8652e3a0f6d6 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java @@ -225,7 +225,7 @@ public void testNestedSorting() throws Exception { Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(7)); @@ -240,7 +240,7 @@ public void 
testNestedSorting() throws Exception { sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(13)); @@ -258,7 +258,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(12)); @@ -273,7 +273,7 @@ public void testNestedSorting() throws Exception { sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(15)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(3)); @@ -289,7 +289,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, 127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); 
assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(127)); @@ -305,7 +305,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-127)); @@ -340,7 +340,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java index cecf20360178c..ca176a5402c06 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java @@ -73,7 +73,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = 
searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java index 49d9c0eedd121..60e7473a2101a 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java @@ -76,7 +76,7 @@ protected void assertAvgScoreMode( ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index e088e8569bf8a..cd6f596cfda05 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -310,7 +311,7 @@ public 
void testNestedSorting() throws Exception { Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("a")); @@ -332,7 +333,7 @@ public void testNestedSorting() throws Exception { ); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("o")); @@ -358,7 +359,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("m")); @@ -620,42 +621,43 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs"))); QueryBuilder queryBuilder = new MatchAllQueryBuilder(); TopFieldDocs topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, 
equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(5L)); + StoredFields storedFields = searcher.storedFields(); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(234L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(976L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MAX_VALUE)); // Specific genre { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, 
searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } @@ -664,16 +666,16 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.order(SortOrder.DESC); queryBuilder = new MatchAllQueryBuilder(); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(5L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); - 
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(849L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(180L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -681,26 +683,26 @@ public void testMultiLevelNestedSorting() throws IOException { { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); 
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L)); } @@ -717,10 +719,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); sortBuilder.order(SortOrder.DESC); @@ -730,10 +732,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - 
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(76L)); } @@ -755,10 +757,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MAX_VALUE)); sortBuilder.order(SortOrder.DESC); @@ -768,10 +770,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) 
topFields.scoreDocs[1]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -785,26 +787,26 @@ public void testMultiLevelNestedSorting() throws IOException { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + 
assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java index 8355f0156d0c3..7c7313d6b6516 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java @@ -52,7 +52,7 @@ public void testReaderCloseListenerIsCalled() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( "field", @@ -82,7 +82,7 @@ public void testReaderCloseListenerIsCalled() throws IOException { } outerCount.incrementAndGet(); }); - assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); wrap.close(); assertFalse("wrapped reader is closed", wrap.getIndexReader().tryIncRef()); assertEquals(sourceRefCount, open.getRefCount()); @@ -106,7 +106,7 @@ public void testIsCacheable() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new 
Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( @@ -153,7 +153,7 @@ public void testAlwaysWrapWithFieldUsageTrackingDirectoryReader() throws IOExcep writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); CheckedFunction wrapper = directoryReader -> directoryReader; try ( diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index f15506676dc39..d480f7bfc8d7f 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2793,9 +2793,9 @@ public void testReaderWrapperIsUsed() throws IOException { } try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } CheckedFunction wrapper = reader -> new FieldMaskingReader("foo", reader); closeShards(shard); @@ -2815,9 +2815,9 @@ public void testReaderWrapperIsUsed() throws IOException { try (Engine.Searcher searcher = newShard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 0); + 
assertEquals(search.totalHits.value(), 0); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } try (Engine.GetResult getResult = newShard.get(new Engine.Get(false, false, "1"))) { assertTrue(getResult.exists()); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 6fa405c091da1..ccf0bbebcc354 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -437,9 +437,8 @@ public void testLotsOfThreads() throws Exception { ) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.document( - getResult.docIdAndVersion().docId - ); + org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.storedFields() + .document(getResult.docIdAndVersion().docId); assertThat(document.getValues("test"), arrayContaining(testFieldValue)); } } catch (Exception t) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java index 70e5143552235..aa89f31757ef4 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.ScoreMode; @@ -172,6 +173,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole int doc; int numActual = 0; int lastDoc = 0; + StoredFields storedFields = reader.storedFields(); while ((doc = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { lastDoc = doc; while (shard_id.nextDoc() < doc) { @@ -181,7 +183,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole } assertEquals(shard_id.docID(), doc); long shardID = shard_id.nextValue(); - BytesRef id = reader.document(doc).getBinaryValue("_id"); + BytesRef id = storedFields.document(doc).getBinaryValue("_id"); String actualId = Uid.decodeId(id.bytes, id.offset, id.length); assertNotEquals(ctx.reader() + " docID: " + doc + " actualID: " + actualId, shardID, targetShardId); } diff --git a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java index b05f287e628a3..fa5f713dfd672 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java @@ -144,7 +144,7 @@ public double execute( 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); r.close(); @@ -236,7 +236,7 @@ public double execute( searcher.setSimilarity(sim); Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(initCalled.get()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); diff --git a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java 
b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index 394ce35c6b493..38e6ca0be0647 100644 --- a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -33,10 +33,16 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactoryTests extends ESTestCase { + final PreLoadExposingFsDirectoryFactory fsDirectoryFactory = new PreLoadExposingFsDirectoryFactory(); + public void testPreload() throws IOException { doTestPreload(); doTestPreload("nvd", "dvd", "tim"); @@ -60,10 +66,11 @@ public void testPreload() throws IOException { assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.tmp", newIOContext(random()))); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.fdt__0.tmp", newIOContext(random()))); MMapDirectory delegate = hybridDirectory.getDelegate(); - assertThat(delegate, Matchers.instanceOf(FsDirectoryFactory.PreLoadMMapDirectory.class)); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) delegate; - assertTrue(preLoadMMapDirectory.useDelegate("foo.dvd")); - assertTrue(preLoadMMapDirectory.useDelegate("foo.tmp")); + assertThat(delegate, Matchers.instanceOf(MMapDirectory.class)); + var func = fsDirectoryFactory.preLoadFuncMap.get(delegate); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); + fsDirectoryFactory.preLoadFuncMap.clear(); } } @@ -72,7 +79,21 @@ private Directory newDirectory(Settings settings) throws IOException { Path tempDir = createTempDir().resolve(idxSettings.getUUID()).resolve("0"); Files.createDirectories(tempDir); ShardPath path = new ShardPath(false, tempDir, tempDir, 
new ShardId(idxSettings.getIndex(), 0)); - return new FsDirectoryFactory().newDirectory(idxSettings, path); + return fsDirectoryFactory.newDirectory(idxSettings, path); + } + + static class PreLoadExposingFsDirectoryFactory extends FsDirectoryFactory { + + // expose for testing + final Map> preLoadFuncMap = new HashMap<>(); + + @Override + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + var preLoadFunc = FsDirectoryFactory.getPreloadFunc(preLoadExtensions); + mMapDirectory.setPreload(preLoadFunc); + preLoadFuncMap.put(mMapDirectory, preLoadFunc); + return mMapDirectory; + } } private void doTestPreload(String... preload) throws IOException { @@ -85,26 +106,23 @@ private void doTestPreload(String... preload) throws IOException { assertSame(dir, directory); // prevent warnings assertFalse(directory instanceof SleepingLockWrapper); var mmapDirectory = FilterDirectory.unwrap(directory); + assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); if (preload.length == 0) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertFalse(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); } else if (Arrays.asList(preload).contains("*")) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertTrue(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); } else { - assertTrue(directory.toString(), mmapDirectory instanceof FsDirectoryFactory.PreLoadMMapDirectory); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) mmapDirectory; + var func = fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); + 
assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); for (String ext : preload) { - assertTrue("ext: " + ext, preLoadMMapDirectory.useDelegate("foo." + ext)); - assertTrue("ext: " + ext, preLoadMMapDirectory.getDelegate().getPreload()); + assertTrue("ext: " + ext, func.test("foo." + ext, newIOContext(random()))); } - assertFalse(preLoadMMapDirectory.useDelegate("XXX")); - assertFalse(preLoadMMapDirectory.getPreload()); - preLoadMMapDirectory.close(); - expectThrows( - AlreadyClosedException.class, - () -> preLoadMMapDirectory.getDelegate().openInput("foo.tmp", IOContext.DEFAULT) - ); + assertFalse(func.test("XXX", newIOContext(random()))); + mmapDirectory.close(); + expectThrows(AlreadyClosedException.class, () -> mmapDirectory.openInput("foo.tmp", IOContext.DEFAULT)); } } expectThrows( @@ -148,7 +166,7 @@ private void doTestStoreDirectory(Path tempDir, String typeSettingValue, IndexMo ); break; case FS: - if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (Constants.JRE_IS_64BIT) { assertTrue(FsDirectoryFactory.isHybridFs(directory)); } else { assertTrue(directory.toString(), directory instanceof NIOFSDirectory); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index ebffd54a742ce..4f73672471942 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -70,8 +70,9 @@ public String toString(String field) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new 
ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + return new DefaultScorerSupplier(scorer); } @Override @@ -348,16 +349,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return weight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - scorerCalled = true; - return weight.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { scorerSupplierCalled = true; - return weight.scorerSupplier(context); + ScorerSupplier inScorerSupplier = weight.scorerSupplier(context); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + scorerCalled = true; + return inScorerSupplier.get(leadCost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index dfd71bba0208c..773c660caa1c6 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -421,8 +421,8 @@ public BytesReference get() { try (BytesStreamOutput out = new BytesStreamOutput()) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1); - assertEquals(1, topDocs.totalHits.value); - Document document = reader.document(topDocs.scoreDocs[0].doc); + assertEquals(1, topDocs.totalHits.value()); + Document document = reader.storedFields().document(topDocs.scoreDocs[0].doc); 
out.writeString(document.get("value")); loadedFromCache = false; return out.bytes(); diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java index baa35101c1c87..1c1a9a645b99b 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java @@ -114,11 +114,11 @@ private > void assertSearchCollapse( TopFieldDocs topDocs = searcher.search(query, topFieldCollectorManager); TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(sortField.getField(), collapseTopFieldDocs.field); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); + assertEquals(totalHits, topDocs.totalHits.value()); Object currentValue = null; int topDocsIndex = 0; diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index 90adb2d0ffcce..30c68fe708c83 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -49,11 +49,11 @@ public class SinglePassGroupingCollectorTests extends ESTestCase { private static class SegmentSearcher extends IndexSearcher { - private final List ctx; + private 
final LeafReaderContextPartition[] ctx; SegmentSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -62,7 +62,7 @@ public void search(Weight weight, Collector collector) throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } @@ -140,10 +140,10 @@ private > void assertSearchCollapse( TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(collapseField.getField(), collapseTopFieldDocs.field); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); assertEquals(totalHits, topDocs.scoreDocs.length); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(totalHits, topDocs.totalHits.value()); Set seen = new HashSet<>(); // collapse field is the last sort diff --git a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java index 71a2703555318..076cd0af1bf26 100644 --- a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java @@ -88,7 +88,7 @@ public void testDismaxQuery() throws IOException { query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); TopDocs search = 
searcher.search(query.build(), 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { BooleanQuery.Builder query = new BooleanQuery.Builder(); @@ -110,7 +110,7 @@ public void testDismaxQuery() throws IOException { query.add(gen, BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 4); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(1), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { @@ -120,8 +120,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("unknown_field")) { assertThat(termStates.docFreq(), equalTo(0)); @@ -131,7 +131,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), greaterThan(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo((long) iters + username.length)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo((long) iters + username.length)); } { // test with an unknown field and an unknown term @@ -140,13 +140,13 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - 
assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); assertThat(termStates.docFreq(), equalTo(0)); assertThat(termStates.totalTermFreq(), equalTo(0L)); } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(0L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(0L)); } { // test with an unknown field and a term that is present in only one field @@ -155,8 +155,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("username")) { assertThat(termStates.docFreq(), equalTo(1)); @@ -166,7 +166,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), equalTo(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(1L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(1L)); } reader.close(); w.close(); @@ -250,7 +250,7 @@ public void testMinTTF() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } reader.close(); w.close(); @@ -298,7 +298,7 @@ 
public void testRandomFields() throws IOException { String[] fieldNames = fields.keySet().toArray(new String[0]); Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fieldNames, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); - assertTrue(search.totalHits.value > 0); + assertTrue(search.totalHits.value() > 0); assertTrue(search.scoreDocs.length > 0); } reader.close(); @@ -332,7 +332,7 @@ public void testMissingFields() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); reader.close(); w.close(); diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index f5266568e6fdf..126641037fde7 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -141,7 +141,7 @@ private void assertHighlightOneDoc( IndexSearcher searcher = newSearcher(reader); iw.close(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java 
b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java index 239c90bdee2fd..78cd90e8f5269 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -336,10 +337,11 @@ private void assertAllDocs(Set terms, Function fun withIndexSearcher(searcher -> { for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { IndexReader reader = leafReaderContext.reader(); + StoredFields storedFields = reader.storedFields(); DocIdSetIterator docIdSetIterator = DocIdSetIterator.all(reader.maxDoc()); ScriptTermStats termStats = new ScriptTermStats(searcher, leafReaderContext, docIdSetIterator::docID, terms); while (docIdSetIterator.nextDoc() <= reader.maxDoc()) { - String docId = reader.document(docIdSetIterator.docID()).get("id"); + String docId = storedFields.document(docIdSetIterator.docID()).get("id"); if (expectedValues.containsKey(docId)) { assertThat(function.apply(termStats), expectedValues.get(docId)); } diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 6a6f5dc44ef6f..663b39d116913 100644 --- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -714,11 +714,8 @@ public void testMultiValuedOrds() throws Exception { @Override public long nextOrd() { - if (i < array[doc].length) { - return array[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < array[doc].length; + return array[doc][i++]; } 
@Override @@ -762,7 +759,8 @@ private void verifySortedSet(Supplier supplier, int maxDoc) } int expected = -1; if (values.advanceExact(i)) { - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int j = 0; j < values.docValueCount(); j++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { @@ -810,7 +808,8 @@ private void verifySortedSet( if (++count > maxChildren) { break; } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index 5e1296c354015..aa2e76f512cc8 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; @@ -105,14 +106,17 @@ public void testCancellableCollector() throws IOException { true ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(reader.numDocs())); searcher.addQueryCancellation(cancellation); - expectThrows(TaskCancelledException.class, () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager())); + expectThrows( + 
TaskCancelledException.class, + () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())) + ); searcher.removeQueryCancellation(cancellation); - Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits2, equalTo(reader.numDocs())); } @@ -203,15 +207,17 @@ public void testExitableDirectoryReaderVectors() throws IOException { cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues vectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); cancelled.set(true); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); // On the first doc when already canceled, it throws - expectThrows(TaskCancelledException.class, vectorValues::nextDoc); + expectThrows(TaskCancelledException.class, iterator::nextDoc); cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues uncancelledVectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); + uncancelledVectorValues.iterator(); cancelled.set(true); searcher.removeQueryCancellation(cancellation); // On the first doc when already canceled, it throws, but with the cancellation removed, it should not - uncancelledVectorValues.nextDoc(); + iterator.nextDoc(); } private static class PointValuesIntersectVisitor implements PointValues.IntersectVisitor { diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 642804730a144..5dc07a41b3f8c 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -772,7 +772,7 @@ public 
RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) ), (response) -> { SearchHits hits = response.getHits(); - assertEquals(hits.getTotalHits().value, numDocs); + assertEquals(hits.getTotalHits().value(), numDocs); assertEquals(hits.getHits().length, 2); int index = 0; for (SearchHit hit : hits.getHits()) { @@ -2505,7 +2505,7 @@ public void testWaitOnRefresh() throws ExecutionException, InterruptedException ); PlainActionFuture future = new PlainActionFuture<>(); service.executeQueryPhase(request, task, future.delegateFailure((l, r) -> { - assertEquals(1, r.queryResult().getTotalHits().value); + assertEquals(1, r.queryResult().getTotalHits().value()); l.onResponse(null); })); future.get(); @@ -2714,7 +2714,7 @@ public void testEnableSearchWorkerThreads() throws IOException { SearchShardTask task = new SearchShardTask(0, "type", "action", "description", null, emptyMap()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } try { @@ -2725,7 +2725,7 @@ public void testEnableSearchWorkerThreads() throws IOException { .get(); assertTrue(response.isAcknowledged()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNull(searchContext.searcher().getExecutor()); + assertFalse(searchContext.searcher().hasExecutor()); } } finally { // reset original default setting @@ -2735,7 +2735,7 @@ public void testEnableSearchWorkerThreads() throws IOException { .setPersistentSettings(Settings.builder().putNull(SEARCH_WORKER_THREADS_ENABLED.getKey()).build()) .get(); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } } } 
@@ -2778,7 +2778,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2795,7 +2795,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "DFS supports parallel collection, so the number of slices should be > 1.", @@ -2808,7 +2808,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2825,7 +2825,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", @@ -2838,9 +2838,9 @@ public void 
testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as FETCH does not support parallel collection and thus runs on the calling" @@ -2854,9 +2854,9 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as NONE does not support parallel collection.", @@ -2877,9 +2877,9 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices 
should be 1 when QUERY parallel collection is disabled.", @@ -2899,7 +2899,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2916,7 +2916,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index 6b4618bf3257a..ac5d886c9ba10 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -33,14 +33,8 @@ import static org.hamcrest.Matchers.equalTo; public class MultiBucketCollectorTests extends ESTestCase { - private static class ScoreAndDoc extends Scorable { + private static class Score extends Scorable { float score; - int doc = -1; - - @Override - public int docID() { - return doc; - } @Override public float score() { @@ -247,7 +241,7 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { collector1 = new TerminateAfterBucketCollector(collector1, 1); collector2 = new 
TerminateAfterBucketCollector(collector2, 2); - Scorable scorer = new ScoreAndDoc(); + Scorable scorer = new Score(); List collectors = Arrays.asList(collector1, collector2); Collections.shuffle(collectors, random()); @@ -275,4 +269,78 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { assertFalse(setScorerCalled1.get()); assertFalse(setScorerCalled2.get()); } + + public void testCacheScores() throws IOException { + ScoringBucketCollector scoringBucketCollector1 = new ScoringBucketCollector(); + ScoringBucketCollector scoringBucketCollector2 = new ScoringBucketCollector(); + + DummyScorable scorable = new DummyScorable(); + + // First test the tester + LeafBucketCollector leafBucketCollector1 = scoringBucketCollector1.getLeafCollector(null); + LeafBucketCollector leafBucketCollector2 = scoringBucketCollector2.getLeafCollector(null); + leafBucketCollector1.setScorer(scorable); + leafBucketCollector2.setScorer(scorable); + leafBucketCollector1.collect(0, 0); + leafBucketCollector2.collect(0, 0); + assertEquals(2, scorable.numScoreCalls); + + // reset + scorable.numScoreCalls = 0; + LeafBucketCollector leafBucketCollector = MultiBucketCollector.wrap( + randomBoolean(), + Arrays.asList(scoringBucketCollector1, scoringBucketCollector2) + ).getLeafCollector(null); + leafBucketCollector.setScorer(scorable); + leafBucketCollector.collect(0, 0); + // Even though both leaf collectors called scorable.score(), it only got called once thanks to caching + assertEquals(1, scorable.numScoreCalls); + } + + private static class ScoringBucketCollector extends BucketCollector { + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; // needs scores + } + + @Override + public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx) throws IOException { + return new ScoringLeafBucketCollector(); + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws 
IOException { + + } + } + + private static class ScoringLeafBucketCollector extends LeafBucketCollector { + + private Scorable scorable; + + @Override + public void setScorer(Scorable scorer) throws IOException { + this.scorable = scorer; + } + + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + scorable.score(); + } + } + + private static class DummyScorable extends Scorable { + int numScoreCalls = 0; + + @Override + public float score() throws IOException { + numScoreCalls++; + return 42f; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 879c7e6aeff7f..eb5fa734a8c91 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -80,12 +80,12 @@ protected void indexData() throws Exception { indexRandom(true, docs); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()), resp -> { - long totalOnOne = resp.getHits().getTotalHits().value; + long totalOnOne = resp.getHits().getTotalHits().value(); assertThat(totalOnOne, is(15L)); }); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()), resp -> { assertNoFailures(resp); - long totalOnTwo = resp.getHits().getTotalHits().value; + long totalOnTwo = resp.getHits().getTotalHits().value(); assertThat(totalOnTwo, is(12L)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index bbd12726ac4e3..28a032e7281e6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -2571,19 +2571,19 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); assertEquals(2L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); topHits = result.getBuckets().get(2).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new FieldExistsQuery("keyword")), dataset, () -> { @@ -2598,13 +2598,13 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); 
assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 71b93888ba243..8a72f8af7035c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -15,14 +15,29 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.TextField; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafMetaData; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.StoredFields; +import org.apache.lucene.index.TermVectors; +import org.apache.lucene.index.Terms; import org.apache.lucene.search.CollectionTerminatedException; import 
org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.KnnCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -54,8 +69,6 @@ import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.LONG; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { static class ClassAndName { @@ -71,11 +84,8 @@ static class ClassAndName { private IndexReader indexReader; @Before - public void setUpMocks() { - indexReader = mock(IndexReader.class); - IndexReaderContext indexReaderContext = mock(IndexReaderContext.class); - when(indexReaderContext.leaves()).thenReturn(List.of()); - when(indexReader.getContext()).thenReturn(indexReaderContext); + public void set() { + indexReader = new DummyReader(); } public void testRandomLong() throws IOException { @@ -425,4 +435,126 @@ private static void createListCombinations( } } } + + static class DummyReader extends LeafReader { + @Override + public CacheHelper getCoreCacheHelper() { + return null; + } + + @Override + public Terms terms(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public BinaryDocValues getBinaryDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedDocValues getSortedDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedSetDocValues getSortedSetDocValues(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNormValues(String field) 
throws IOException { + return null; + } + + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return null; + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + return null; + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return null; + } + + @Override + public void searchNearestVectors(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public void searchNearestVectors(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public FieldInfos getFieldInfos() { + return null; + } + + @Override + public Bits getLiveDocs() { + return null; + } + + @Override + public PointValues getPointValues(String field) throws IOException { + return null; + } + + @Override + public void checkIntegrity() throws IOException { + + } + + @Override + public LeafMetaData getMetaData() { + return null; + } + + @Override + public TermVectors termVectors() throws IOException { + return null; + } + + @Override + public int numDocs() { + return 0; + } + + @Override + public int maxDoc() { + return 0; + } + + @Override + public StoredFields storedFields() throws IOException { + return null; + } + + @Override + protected void doClose() throws IOException { + + } + + @Override + public CacheHelper getReaderCacheHelper() { + return null; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java index 4b9a72bacc97d..3d1ed0704acf9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -258,7 +259,7 @@ public void testNumericSorted() { } private static IndexReader mockIndexReader(int maxDoc, int numDocs) { - IndexReader reader = mock(IndexReader.class); + IndexReader reader = mock(LeafReader.class); when(reader.hasDeletions()).thenReturn(maxDoc - numDocs > 0); when(reader.maxDoc()).thenReturn(maxDoc); when(reader.numDocs()).thenReturn(numDocs); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index 26e643510859c..06f1db352e8f0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -42,9 +42,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (i == ords.length) { - return NO_MORE_ORDS; - } + assert i < ords.length; return ords[i++]; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 4a08295bd7bcd..48aabb61371e9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -414,7 +414,7 @@ public void testWithNestedScoringAggregations() throws 
IOException { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); TotalHits hits = topHits.getHits().getTotalHits(); assertNotNull(hits); - assertThat(hits.value, equalTo(counter)); + assertThat(hits.value(), equalTo(counter)); assertThat(topHits.getHits().getMaxScore(), equalTo(Float.NaN)); counter += 1; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 2c76ed96da488..b267cb2e656b6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -1730,8 +1730,8 @@ private void assertNestedTopHitsScore(InternalMultiBucketAggregation terms int ptr = 9; for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getTotalHits().value, equalTo((long) ptr)); - assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation); + assertThat(topHits.getHits().getTotalHits().value(), equalTo((long) ptr)); + assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation()); if (withScore) { assertThat(topHits.getHits().getMaxScore(), equalTo(1f)); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 87eb2bdc29fbd..07d535167b318 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -220,8 +220,8 @@ protected void assertReduced(InternalTopHits reduced, List inpu TotalHits.Relation relation = 
TotalHits.Relation.EQUAL_TO; for (int input = 0; input < inputs.size(); input++) { SearchHits internalHits = inputs.get(input).getHits(); - totalHits += internalHits.getTotalHits().value; - if (internalHits.getTotalHits().relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += internalHits.getTotalHits().value(); + if (internalHits.getTotalHits().relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { relation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } maxScore = max(maxScore, internalHits.getMaxScore()); @@ -379,14 +379,14 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { case 2 -> size += between(1, 100); case 3 -> topDocs = new TopDocsAndMaxScore( new TopDocs( - new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), topDocs.topDocs.totalHits.relation), + new TotalHits(topDocs.topDocs.totalHits.value() + between(1, 100), topDocs.topDocs.totalHits.relation()), topDocs.topDocs.scoreDocs ), topDocs.maxScore + randomFloat() ); case 4 -> { TotalHits totalHits = new TotalHits( - searchHits.getTotalHits().value + between(1, 100), + searchHits.getTotalHits().value() + between(1, 100), randomFrom(TotalHits.Relation.values()) ); searchHits = SearchHits.unpooled(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 0c5217ded982b..6fb147e3ffc89 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -55,7 +55,7 @@ public void testTopLevel() throws Exception { result = testCase(query, topHits("_name")); } SearchHits searchHits = ((TopHits) result).getHits(); - assertEquals(3L, searchHits.getTotalHits().value); + assertEquals(3L, 
searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("2", searchHits.getAt(1).getId()); assertEquals("1", searchHits.getAt(2).getId()); @@ -65,7 +65,7 @@ public void testTopLevel() throws Exception { public void testNoResults() throws Exception { TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC)); SearchHits searchHits = result.getHits(); - assertEquals(0L, searchHits.getTotalHits().value); + assertEquals(0L, searchHits.getTotalHits().value()); assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } @@ -89,27 +89,27 @@ public void testInsideTerms() throws Exception { // The "a" bucket TopHits hits = (TopHits) terms.getBucketByKey("a").getAggregations().get("top"); SearchHits searchHits = (hits).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("a").getAggregations().get("top")))); // The "b" bucket searchHits = ((TopHits) terms.getBucketByKey("b").getAggregations().get("top")).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("b").getAggregations().get("top")))); // The "c" bucket searchHits = ((TopHits) terms.getBucketByKey("c").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) 
terms.getBucketByKey("c").getAggregations().get("top")))); // The "d" bucket searchHits = ((TopHits) terms.getBucketByKey("d").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top")))); } @@ -179,7 +179,7 @@ public void testSetScorer() throws Exception { .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); TopHits result = searchAndReduce(reader, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query)); - assertEquals(3, result.getHits().getTotalHits().value); + assertEquals(3, result.getHits().getTotalHits().value()); reader.close(); directory.close(); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index e47614145e924..7fa2732191cd1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -88,12 +88,9 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (consumed) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - consumed = true; - return 0; - } + assert consumed == false; + consumed = true; + return 0; } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index bc6b72d9ddd3e..2a36887cc459a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ 
-124,11 +124,8 @@ public long getValueCount() { @Override public long nextOrd() { - if (i < ords[doc].length) { - return ords[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < ords[doc].length; + return ords[doc][i++]; } @Override @@ -153,10 +150,8 @@ public int docValueCount() { for (int ord : ords[i]) { assertEquals(values[ord], withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } else { assertEquals(missing, withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java index caf8df55ce528..71fd3a4761cbe 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java @@ -201,7 +201,10 @@ public void testCollectFromMiddle() throws IOException { BucketCollector collector = getBucketCollector(2 * DOC_COUNTS); // skip the first doc of segment 1 and 2 - indexSearcher.search(SortedSetDocValuesField.newSlowSetQuery("_tsid", new BytesRef("tsid0"), new BytesRef("tsid1")), collector); + indexSearcher.search( + SortedSetDocValuesField.newSlowSetQuery("_tsid", List.of(new BytesRef("tsid0"), new BytesRef("tsid1"))), + collector + ); collector.postCollection(); reader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 34ee0eec101b6..9957d8c92b955 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.ConstantScoreQuery; @@ -47,6 +46,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; @@ -76,7 +76,6 @@ import java.io.IOException; import java.io.UncheckedIOException; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.IdentityHashMap; @@ -251,7 +250,7 @@ public void testConcurrentCollection() throws Exception { Integer.MAX_VALUE, 1 ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertEquals(numDocs, totalHits.intValue()); int numExpectedTasks = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), Integer.MAX_VALUE, 1).length; // check that each slice except for one that executes on the calling thread goes to the executor, no matter the queue size @@ -367,7 +366,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery()))); TopDocs topDocs = searcher.search(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("foo", "bar"))), 3f), 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, 
topDocs.scoreDocs.length); assertEquals(3f, topDocs.scoreDocs[0].score, 0); @@ -406,7 +405,7 @@ private static void assertSlices(LeafSlice[] slices, int numDocs, int numThreads int sumDocs = 0; assertThat(slices.length, lessThanOrEqualTo(numThreads)); for (LeafSlice slice : slices) { - int sliceDocs = Arrays.stream(slice.leaves).mapToInt(l -> l.reader().maxDoc()).sum(); + int sliceDocs = slice.getMaxDocs(); assertThat(sliceDocs, greaterThanOrEqualTo((int) (0.1 * numDocs))); sumDocs += sliceDocs; } @@ -497,9 +496,14 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { contextIndexSearcher.throwTimeExceededException(); - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + Scorer scorer = new ConstantScoreScorer( + score(), + scoreMode, + DocIdSetIterator.all(context.reader().maxDoc()) + ); + return new DefaultScorerSupplier(scorer); } @Override @@ -583,7 +587,10 @@ public Query rewrite(IndexSearcher indexSearcher) { return null; } }; - Integer hitCount = contextIndexSearcher.search(testQuery, new TotalHitCountCollectorManager()); + Integer hitCount = contextIndexSearcher.search( + testQuery, + new TotalHitCountCollectorManager(contextIndexSearcher.getSlices()) + ); assertEquals(0, hitCount.intValue()); assertTrue(contextIndexSearcher.timeExceeded()); } finally { @@ -747,15 +754,9 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.scorer(context); - } - - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { + public ScorerSupplier 
scorerSupplier(LeafReaderContext context) throws IOException { assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.bulkScorer(context); + return weight.scorerSupplier(context); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index c42f3156c6d29..b728d40900570 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -125,14 +125,14 @@ public void testManagerWithSearcher() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); String profileReason = "profiler_reason"; ProfileCollectorManager profileCollectorManager = new ProfileCollectorManager<>(topDocsManager, profileReason); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), profileCollectorManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); CollectorResult result = profileCollectorManager.getCollectorTree(); assertEquals("profiler_reason", result.getReason()); assertEquals("SimpleTopScoreDocCollector", result.getName()); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java index e868293ef4a1c..98d79df63db8e 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java @@ -10,14 +10,12 @@ package org.elasticsearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiReader; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Matches; import org.apache.lucene.search.MatchesIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -34,10 +32,6 @@ private static class FakeScorer extends Scorer { public float maxScore, minCompetitiveScore; - protected FakeScorer(Weight weight) { - super(weight); - } - @Override public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); @@ -75,22 +69,14 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return Explanation.match(1, "fake_description"); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - FakeScorer fakeScorer = new FakeScorer(this); - fakeScorer.maxScore = 42f; - return fakeScorer; - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - Weight weight = this; return new ScorerSupplier() { private long cost = 0; @Override public Scorer get(long leadCost) { - return new Scorer(weight) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return null; @@ -187,23 +173,17 @@ public Iterator iterator() { } public void testPropagateMinCompetitiveScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new 
QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); assertEquals(0.42f, fakeScorer.minCompetitiveScore, 0f); } public void testPropagateMaxScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); fakeScorer.maxScore = 42f; assertEquals(42f, profileScorer.getMaxScore(DocIdSetIterator.NO_MORE_DOCS), 0f); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 9df03905f7be2..44c46e3f692ba 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -241,11 +241,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { diff --git 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index d33c033ecef2d..de6218e912953 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -120,7 +120,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topScoreDocManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -133,7 +133,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } } @@ -150,7 +150,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -165,7 +165,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numField2Docs, result.aggs.intValue()); } } @@ -184,7 +184,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); 
assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -199,7 +199,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } } @@ -218,7 +218,7 @@ public void testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -235,7 +235,7 @@ public void testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // post_filter is not applied to aggs assertEquals(reader.maxDoc(), result.aggs.intValue()); } @@ -251,7 +251,7 @@ public void testMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -266,7 +266,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, 
result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -279,7 +279,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -292,7 +292,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -306,7 +306,7 @@ public void testMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -322,7 +322,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // min_score is applied to aggs as well as top docs assertEquals(numField2Docs, result.aggs.intValue()); } @@ -338,7 +338,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); 
assertEquals(numDocs, result.aggs.intValue()); } { @@ -353,7 +353,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -370,7 +370,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -385,7 +385,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -398,7 +398,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -411,7 +411,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -427,7 +427,7 @@ public void 
testPostFilterAndMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -443,7 +443,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); assertEquals(numField3Docs, result.aggs.intValue()); } { @@ -458,7 +458,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -473,7 +473,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -623,7 +623,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -638,7 +638,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws 
IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -651,7 +651,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -667,7 +667,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); assertEquals(terminateAfter, result.aggs.intValue()); } } @@ -683,7 +683,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -698,7 +698,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -713,7 +713,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = 
searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -729,7 +729,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); // aggs see more documents because the filter is not applied to them assertThat(result.aggs, Matchers.greaterThanOrEqualTo(terminateAfter)); } @@ -1139,11 +1139,6 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { public float score() { return 0; } - - @Override - public int docID() { - return 0; - } }; QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( @@ -1473,11 +1468,6 @@ public float score() throws IOException { return 0; } - @Override - public int docID() { - return 0; - } - @Override public void setMinCompetitiveScore(float minScore) { setMinCompetitiveScoreCalled = true; @@ -1521,7 +1511,7 @@ public void setScorer(Scorable scorer) throws IOException { setScorerCalled = true; if (expectedScorable != null) { while (expectedScorable.equals(scorer.getClass()) == false && scorer instanceof FilterScorable) { - scorer = scorer.getChildren().iterator().next().child; + scorer = scorer.getChildren().iterator().next().child(); } assertEquals(expectedScorable, scorer.getClass()); } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 56a8b0f3a8c30..1f74668158e0e 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -144,7 +144,7 @@ private void countTestCase(Query query, IndexReader reader, 
boolean shouldCollec QueryPhase.addCollectorsAndSearch(context); ContextIndexSearcher countSearcher = shouldCollectCount ? newContextSearcher(reader) : noCollectionContextSearcher(reader); - assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -233,15 +233,15 @@ public void testPostFilterDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when a post_filter is provided, hence it forces collection despite @@ -249,16 +249,16 @@ public void 
testPostFilterDisablesHitCountShortcut() throws Exception { context.setSize(0); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -269,8 +269,8 @@ public void testTerminateAfterWithFilter() throws Exception { context.setSize(10); context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", "bar")))); QueryPhase.addCollectorsAndSearch(context); - assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } } @@ -280,15 +280,15 @@ public void 
testMinScoreDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when min_score is provided, hence it forces collection despite @@ -296,16 +296,16 @@ public void testMinScoreDisablesHitCountShortcut() throws Exception { context.setSize(0); context.minimumScore(100); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + 
assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.minimumScore(100); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -336,17 +336,17 @@ public void testInOrderScrollOptimization() throws Exception { context.setSize(size); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); context.setSearcher(earlyTerminationContextSearcher(reader, size)); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size)); } } @@ -364,8 +364,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -375,8 +375,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new 
MatchAllDocsQuery())) { @@ -387,8 +387,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); // Given that total hit count does not require collection, PartialHitCountCollector does not early terminate. - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -407,8 +407,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.setSize(0); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -419,8 +419,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } { @@ -434,9 +434,9 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(trackTotalHits); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) trackTotalHits)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) trackTotalHits)); assertThat( - context.queryResult().topDocs().topDocs.totalHits.relation, + context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); @@ -449,8 +449,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(11, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(10L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(10L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -468,7 +468,7 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(10); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(10)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 1), new MatchAllDocsQuery())) { @@ -477,8 +477,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -489,8 +489,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), new 
MatchAllDocsQuery())) { @@ -500,8 +500,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } { @@ -515,8 +515,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(size)); } } @@ -535,8 +535,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -546,8 +546,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -558,8 +558,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, 6)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ 
-572,8 +572,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { // depending on docs distribution we may or may not be able to honor terminate_after: low scoring hits are skipped via // setMinCompetitiveScore, which bypasses terminate_after until the next leaf collector is pulled, when that happens. assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(5)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -584,9 +584,9 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(8, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); // TODO this looks off, it should probably be GREATER_THAN_OR_EQUAL_TO - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } } @@ -599,8 +599,8 @@ public void testIndexSortingEarlyTermination() throws Exception { context.setSize(1); context.sort(new SortAndFormats(sort, new 
DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -612,7 +612,7 @@ public void testIndexSortingEarlyTermination() throws Exception { context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1))); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(numDocs - 1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -623,7 +623,7 @@ public void testIndexSortingEarlyTermination() throws Exception { context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.executeQuery(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); 
assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -686,18 +686,18 @@ public void testIndexSortScrollOptimization() throws Exception { context.sort(searchSortAndFormat); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); int sizeMinus1 = context.queryResult().topDocs().topDocs.scoreDocs.length - 1; FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1]; context.setSearcher(earlyTerminationContextSearcher(reader, 10)); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) { @SuppressWarnings("unchecked") @@ -746,8 +746,8 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE); QueryPhase.executeQuery(context); - assertEquals(5, 
context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.EQUAL_TO); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.EQUAL_TO); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); } try (TestSearchContext context = createContext(newContextSearcher(reader), q)) { @@ -764,9 +764,9 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.TOP_DOCS); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } } @@ -881,8 +881,8 @@ public void testNumericSortOptimization() throws Exception { QueryPhase.addCollectorsAndSearch(searchContext); assertTrue(searchContext.sort().sort.getSort()[0].getOptimizeSortWithPoints()); assertThat(searchContext.queryResult().topDocs().topDocs.scoreDocs, arrayWithSize(0)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } // 7. 
Test that sort optimization doesn't break a case where from = 0 and size= 0 @@ -950,8 +950,8 @@ public void testMaxScoreQueryVisitor() { // assert score docs are in order and their number is as expected private static void assertSortResults(TopDocs topDocs, long totalNumDocs, boolean isDoubleSort) { - assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation); - assertThat(topDocs.totalHits.value, lessThan(totalNumDocs)); // we collected less docs than total number + assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation()); + assertThat(topDocs.totalHits.value(), lessThan(totalNumDocs)); // we collected less docs than total number long cur1, cur2; long prev1 = Long.MIN_VALUE; long prev2 = Long.MIN_VALUE; @@ -990,7 +990,7 @@ public void testMinScore() throws Exception { context.trackTotalHitsUpTo(5); QueryPhase.addCollectorsAndSearch(context); - assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -1136,7 +1136,7 @@ private static ContextIndexSearcher earlyTerminationContextSearcher(IndexReader ) { @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] partitions, Weight weight, Collector collector) throws IOException { final Collector in = new FilterCollector(collector) { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { @@ -1153,7 +1153,7 @@ public void collect(int doc) throws IOException { }; } }; - super.search(leaves, weight, in); + super.search(partitions, weight, in); } }; } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index 3bf9514cad547..b417f7adbc8b7 100644 --- 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -132,7 +133,7 @@ private void scorerTimeoutTest(int size, CheckedConsumer context = mapping.parseContext(document); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 320e3fce2e832..7791073ef36fa 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -211,8 +211,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); @@ -227,8 +227,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 
1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java index dc9d026af0135..72ae45fd26143 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java @@ -226,7 +226,7 @@ public void testRandom() throws IOException { int n = random().nextInt(100) + 1; TopDocs results = searcher.search(query, n); assert reader.hasDeletions() == false; - assertTrue(results.totalHits.value >= results.scoreDocs.length); + assertTrue(results.totalHits.value() >= results.scoreDocs.length); // verify the results are in descending score order float last = Float.MAX_VALUE; for (ScoreDoc scoreDoc : results.scoreDocs) { diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index 18d5c8c85fbec..bef0bbfd27ff6 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -282,8 +282,8 @@ public void testScoreDocQuery() throws IOException { final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); TopDocs topDocs = searcher.search(query, 100); - assertEquals(scoreDocs.length, topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation); + assertEquals(scoreDocs.length, 
topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation()); Arrays.sort(topDocs.scoreDocs, Comparator.comparingInt(scoreDoc -> scoreDoc.doc)); assertEquals(scoreDocs.length, topDocs.scoreDocs.length); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java index 8d9fa847a988c..f2ead93ebb6e1 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java @@ -62,14 +62,14 @@ public void testSimpleEuclidean() throws Exception { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 3f, 0.25f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 1f, 0.5f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } @@ -138,14 +138,14 @@ public void testSimpleCosine() throws IOException { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .8f, .9f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .9f, 0.95f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index f8c3edcbb9d42..c46d98fe1cd8b 100644 --- 
a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -372,7 +372,7 @@ public void testSuccessfulSnapshotAndRestore() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseListener, r -> { - assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value); + assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value()); documentCountVerified.set(true); }); @@ -816,7 +816,10 @@ public void testConcurrentSnapshotRestoreAndDeleteOther() { var response = safeResult(searchResponseListener); try { - assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + assertEquals( + documentsFirstSnapshot + documentsSecondSnapshot, + Objects.requireNonNull(response.getHits().getTotalHits()).value() + ); } finally { response.decRef(); } @@ -1177,7 +1180,7 @@ public void testSuccessfulSnapshotWithConcurrentDynamicMappingUpdates() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseStepListener, r -> { - final long hitCount = r.getHits().getTotalHits().value; + final long hitCount = r.getHits().getTotalHits().value(); assertThat( "Documents were restored but the restored index mapping was older than some documents and misses some of their fields", (int) hitCount, diff --git a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java index 9e5b9dd0be547..7f2cb85919d10 100644 --- 
a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java +++ b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java @@ -140,7 +140,7 @@ public void testRetrieveSnapshots() throws Exception { logger.info("--> run a search"); assertResponse(client.prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("text", "sometext")), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(COUNTS.intValue(), greaterThan(0)); }); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 0b5803e9887d6..4713adf6cf01d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -178,7 +178,7 @@ protected static void assertVisibleCount(Engine engine, int numDocs, boolean ref engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(numDocs)); } } @@ -970,7 +970,7 @@ protected static void assertVisibleCount(InternalEngine engine, int numDocs, boo engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), 
new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(numDocs)); } } @@ -1168,7 +1168,10 @@ public static void assertOpsOnReplica( assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index 98f18829966c7..47a227cebc956 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -502,7 +502,7 @@ private void assertQueryOnlyOnText(String queryName, ThrowingRunnable buildQuery } protected final String readSource(IndexReader reader, int docId) throws IOException { - return reader.document(docId).getBinaryValue("_source").utf8ToString(); + return reader.storedFields().document(docId).getBinaryValue("_source").utf8ToString(); } protected final void checkExpensiveQuery(BiConsumer queryBuilder) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index b6aa3f97241e1..1c4cfa4ec7ff9 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.index.mapper; 
+import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -139,6 +140,7 @@ public FieldInfo getFieldInfoWithName(String name) { randomBoolean(), IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, new HashMap<>(), 1, diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 2ddd153b8a936..c76967e5d00ac 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -198,7 +198,9 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { entry("daitchmokotoffsoundex", Void.class), entry("persianstem", Void.class), // not exposed - entry("word2vecsynonym", Void.class) + entry("word2vecsynonym", Void.class), + // not exposed + entry("romaniannormalization", Void.class) ); static final Map> KNOWN_CHARFILTERS = Map.of( diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index cc4aac686a02d..df1ea6b756405 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -100,7 +100,7 @@ public static TotalHits getTotalHits(SearchRequestBuilder request) { } public static long getTotalHitsValue(SearchRequestBuilder request) { - return getTotalHits(request).value; + return getTotalHits(request).value(); } public static SearchResponse responseAsSearchResponse(Response searchResponse) throws IOException { diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index d35d5282238ee..5f64d123c1bed 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -33,6 +33,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -158,6 +159,7 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -831,11 +833,8 @@ protected void debugTestCase( QueryCachingPolicy queryCachingPolicy, MappedFieldType... fieldTypes ) throws IOException { - // Don't use searchAndReduce because we only want a single aggregator. - IndexSearcher searcher = newIndexSearcher( - reader, - aggregationBuilder.supportsParallelCollection(field -> getCardinality(reader, field)) - ); + // Don't use searchAndReduce because we only want a single aggregator, disable parallel collection too. 
+ IndexSearcher searcher = newIndexSearcher(reader, false); if (queryCachingPolicy != null) { searcher.setQueryCachingPolicy(queryCachingPolicy); } @@ -854,7 +853,21 @@ protected void debugTestCase( try { Aggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); - searcher.search(context.query(), aggregator.asCollector()); + searcher.search(context.query(), new CollectorManager() { + boolean called = false; + + @Override + public Collector newCollector() { + assert called == false : "newCollector called multiple times"; + called = true; + return aggregator.asCollector(); + } + + @Override + public Void reduce(Collection collectors) { + return null; + } + }); InternalAggregation r = aggregator.buildTopLevel(); r = doReduce( List.of(r), @@ -959,11 +972,11 @@ protected DirectoryReader wrapDirectoryReader(DirectoryReader reader) throws IOE } private static class ShardSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -972,7 +985,7 @@ public void search(Weight weight, Collector collector) throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index b1b75c1790287..29112b4bd8f5f 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -232,7 +232,7 @@ public void setupSuiteScopeCluster() throws Exception { .setSize(5000), response -> { assertNoFailures(response); - long totalHits = response.getHits().getTotalHits().value; + long totalHits = response.getHits().getTotalHits().value(); XContentBuilder builder = XContentFactory.jsonBuilder(); ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java index f191012fb4ef8..f87a87c5ddbc8 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java @@ -40,7 +40,7 @@ public void testEmptyAggregation() { .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), response -> { CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(geoCentroid, notNullValue()); assertThat(geoCentroid.getName(), equalTo(aggName())); assertThat(geoCentroid.centroid(), equalTo(null)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java index cb6a58ed65a02..a0fdb0bfabf98 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java @@ -155,7 +155,7 @@ public void testEmptyAggregation() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(EMPTY_IDX_NAME).setQuery(matchAllQuery()).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); SpatialBounds geoBounds = response.getAggregations().get(aggName()); assertThat(geoBounds, notNullValue()); assertThat(geoBounds.getName(), equalTo(aggName())); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 71956b431d9b7..fd41213dcd81d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -104,7 +104,7 @@ public void testIndexPointsFilterRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -113,7 +113,7 @@ public void testIndexPointsFilterRectangle() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -170,7 +170,7 @@ public void testIndexPointsPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getId(), equalTo("1")); } ); @@ -209,7 +209,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -219,7 +219,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -229,7 +229,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) 
.setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -238,7 +238,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); @@ -264,7 +264,7 @@ public void testIndexPointsRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -319,7 +319,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java index 91c7a25682ae0..6dca91170c7a5 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java @@ -206,7 +206,7 @@ public void testRandomGeoCollectionQuery() throws Exception { QueryBuilder intersects = queryBuilder().intersectionQuery(defaultFieldName, queryCollection); assertNoFailuresAndResponse(client().prepareSearch(defaultIndexName).setQuery(intersects), response -> { - assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value > 0); + assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value() > 0); }); } @@ -352,7 +352,7 @@ public void testEdgeCases() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, query)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("blakely")); } @@ -457,7 +457,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, "Big_Rectangle")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -466,7 +466,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, "Big_Rectangle")), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java index 4e7378d3a9606..3cd52124d8556 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java @@ -68,7 +68,7 @@ public void testRectangleSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); }); @@ -112,7 +112,7 @@ public void testPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("4", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); @@ -155,7 +155,7 @@ public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("3", searchHits.getAt(0).getId()); assertNotEquals("3", searchHits.getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java index c84d8612b1d4b..97e21f64e2648 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java @@ -102,7 +102,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -114,7 +114,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -126,7 +126,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom && left == right .setQuery(geoBoundingBoxQuery("location").setCorners(40.7143528, -74.0059731, 40.7143528, -74.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("1")); @@ -138,7 +138,7 @@ public void 
testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom .setQuery(geoBoundingBoxQuery("location").setCorners(40.759011, -74.00009, 40.759011, -73.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -150,7 +150,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // left == right .setQuery(geoBoundingBoxQuery("location").setCorners(41.8, -73.9844722, 40.7, -73.9844722)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -163,7 +163,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoDistanceQuery("location").point(40.5, -73.9).distance(25, DistanceUnit.KILOMETERS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("7"), equalTo("4"))); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index 4e47b0c51177c..bb57cb132daa2 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -143,7 +143,7 @@ public void testEnvelopeSpanningDateline() 
throws Exception { } ); assertResponse(client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertNotEquals("1", response.getHits().getAt(0).getId()); assertNotEquals("1", response.getHits().getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index f1ff3492426aa..a93f3b7eaf109 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -73,7 +73,7 @@ public static void corruptFile(Random random, Path... files) throws IOException long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - CodecUtil.footerLength()); checksumAfterCorruption = input.getChecksum(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 87a834d6424b7..d7c5c598ce978 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -908,9 +908,11 @@ public void waitNoPendingTasksOnAll() throws Exception { /** Ensures the result counts are as expected, and logs the results if different */ public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) { final TotalHits totalHits = searchResponse.getHits().getTotalHits(); - if (totalHits.value != expectedResults || 
totalHits.relation != TotalHits.Relation.EQUAL_TO) { + if (totalHits.value() != expectedResults || totalHits.relation() != TotalHits.Relation.EQUAL_TO) { StringBuilder sb = new StringBuilder("search result contains ["); - String value = Long.toString(totalHits.value) + (totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : ""); + String value = Long.toString(totalHits.value()) + (totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO + ? "+" + : ""); sb.append(value).append("] results. expected [").append(expectedResults).append("]"); String failMsg = sb.toString(); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 31c8e5bc3d457..e1ba661eb24d4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -33,6 +33,8 @@ import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.tests.util.TestRuleMarkFailure; @@ -208,6 +210,7 @@ import java.util.stream.LongStream; import java.util.stream.Stream; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.anyOf; @@ -2637,4 +2640,43 @@ public static void ensureAllContextsReleased(SearchService searchService) { throw new AssertionError("Failed to verify search contexts", e); } } + + /** + * Create a new searcher 
over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r) { + return newSearcher(r, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) { + return newSearcher(r, maybeWrap, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions) { + return newSearcher(r, maybeWrap, wrapWithAssertions, randomBoolean()); + } + + /** + * Create a new searcher over the reader. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. 
+ */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions, boolean useThreads) { + if (useThreads) { + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.INTER_SEGMENT); + } + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.NONE); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index db30f6e91f039..42439b5d5785d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -86,13 +86,6 @@ public Terms terms(String field) throws IOException { return terms; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields fields = super.getTermVectors(docID); - thrower.maybeThrow(Flags.TermVectors); - return fields == null ? 
null : new ThrowingFields(fields, thrower); - } - /** * Wraps a Fields but with additional asserts */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 552e301650d9d..5851fc709d14a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -314,7 +314,7 @@ public static void assertHitCount(ActionFuture responseFuture, l public static void assertHitCount(SearchResponse countResponse, long expectedHitCount) { final TotalHits totalHits = countResponse.getHits().getTotalHits(); - if (totalHits.relation != TotalHits.Relation.EQUAL_TO || totalHits.value != expectedHitCount) { + if (totalHits.relation() != TotalHits.Relation.EQUAL_TO || totalHits.value() != expectedHitCount) { fail("Count is " + totalHits + " but " + expectedHitCount + " was expected. 
" + formatShardStatus(countResponse)); } } @@ -346,7 +346,7 @@ public static void assertFourthHit(SearchResponse searchResponse, Matcher matcher) { assertThat("SearchHit number must be greater than 0", number, greaterThan(0)); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo((long) number)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo((long) number)); assertThat(searchResponse.getHits().getAt(number - 1), matcher); } @@ -409,13 +409,13 @@ public static void assertScrollResponsesAndHitCount( responses.add(scrollResponse); int retrievedDocsCount = 0; try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); while (scrollResponse.getHits().getHits().length > 0) { scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()).setScroll(keepAlive).get(); responses.add(scrollResponse); - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); } @@ -704,8 +704,8 @@ public static T assertBooleanSubQuery(Query query, Class su assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery q = (BooleanQuery) query; assertThat(q.clauses(), hasSize(greaterThan(i))); - assertThat(q.clauses().get(i).getQuery(), instanceOf(subqueryType)); - return subqueryType.cast(q.clauses().get(i).getQuery()); + assertThat(q.clauses().get(i).query(), instanceOf(subqueryType)); + return subqueryType.cast(q.clauses().get(i).query()); } /** diff --git 
a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index 885e02a8b5e6a..f517c03468bc2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -69,8 +69,12 @@ public void testSimple() throws IOException { }; AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")); testCase(iw -> { - iw.addDocuments(docs(1000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(1000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } @@ -109,8 +113,12 @@ public void testNestedWithinDateHistogram() throws IOException { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")) .withSplitLeavesIntoSeperateAggregators(false); testCase(iw -> { - iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java index 20da254657c1a..04f0563e433a2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java @@ -170,7 +170,7 @@ public void testNoMatchingField() throws IOException { } public void testQueryFiltering() throws IOException { - testAggregation(new TermInSetQuery("text", new BytesRef("test0"), new BytesRef("test1")), iw -> { + testAggregation(new TermInSetQuery("text", List.of(new BytesRef("test0"), new BytesRef("test1"))), iw -> { for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java index 52ecc40c957b7..aee344777779b 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java @@ -314,9 +314,9 @@ private AsyncSearchResponse doNext() throws Exception { assertThat(newResponse.getSearchResponse().getShardFailures().length, equalTo(numFailures)); assertNull(newResponse.getSearchResponse().getAggregations()); assertNotNull(newResponse.getSearchResponse().getHits().getTotalHits()); - assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value, equalTo(0L)); + assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value(), equalTo(0L)); assertThat( - newResponse.getSearchResponse().getHits().getTotalHits().relation, + 
newResponse.getSearchResponse().getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); } else { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java index 302bb68af6c61..fd4463df07a73 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java @@ -62,7 +62,7 @@ public void testFetchFailuresAllShards() throws Exception { assertEquals(10, searchResponse.getSuccessfulShards()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(0, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); @@ -106,7 +106,7 @@ public void testFetchFailuresOnlySomeShards() throws Exception { assertEquals(10, searchResponse.getTotalShards()); assertEquals(5, searchResponse.getSuccessfulShards()); assertEquals(5, searchResponse.getFailedShards()); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(5, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 95b2324d03b52..16645e7523c36 100644 --- 
a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -390,12 +390,11 @@ public IndexInput clone() { /** Returns default buffer sizes for the given {@link IOContext} */ public static int bufferSize(IOContext context) { - switch (context.context) { + switch (context.context()) { case MERGE: return MERGE_BUFFER_SIZE; case DEFAULT: case FLUSH: - case READ: default: return BUFFER_SIZE; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index 6b390ab5747a8..164e6ed5406ae 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -245,7 +245,7 @@ private Store.MetadataSnapshot getMetadata() throws IOException { private long readFileBytes(String fileName, ByteArray reference) throws IOException { try (Releasable ignored = keyedLock.acquire(fileName)) { - var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : IOContext.READ; + var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : IOContext.DEFAULT; final IndexInput indexInput = cachedInputs.computeIfAbsent(fileName, f -> { try { return commitRef.getIndexCommit().getDirectory().openInput(fileName, context); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 1b7875e4a36b4..618489abd687e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -703,7 +703,7 @@ protected void atLeastDocsIndexed(Client client, String index, long numDocsRepli request.source(new SearchSourceBuilder().size(0)); assertResponse(client.search(request), response -> { assertNotNull(response.getHits().getTotalHits()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(numDocsReplicated)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(numDocsReplicated)); }); }, 60, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java index 0fea3c0d3b74f..1bf52b663b30f 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java @@ -274,7 +274,7 @@ private void assertHits(String index, int numDocsExpected, boolean sourceHadDele }; assertResponse(prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC).setSize(numDocsExpected), searchResponse -> { assertConsumer.accept(searchResponse, sourceHadDeletions); - assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocsExpected, 
searchResponse.getHits().getTotalHits().value()); }); SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC) .setScroll(TimeValue.timeValueMinutes(1)) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java index e66d41d089437..12864dd66a857 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java @@ -9,9 +9,9 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; @@ -23,7 +23,6 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; @@ -214,6 +213,11 @@ public NumericDocValues getNormValues(String field) { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -257,11 +261,6 @@ public LeafMetaData getMetaData() { throw new 
UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docId) { - throw new UnsupportedOperationException(); - } - @Override public TermVectors termVectors() throws IOException { throw new UnsupportedOperationException(); @@ -282,11 +281,6 @@ public int maxDoc() { return maxDoc; } - @Override - public void document(int docID, StoredFieldVisitor visitor) { - throw new UnsupportedOperationException(); - } - @Override protected void doClose() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 093ec031d0b30..421a306babf29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -254,6 +254,7 @@ private SegmentCommitInfo syncSegment( false, IndexOptions.NONE, DocValuesType.NONE, + fieldInfo.docValuesSkipIndexType(), -1, fieldInfo.attributes(), 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java index dea158b425071..d315f09ebda88 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java @@ -105,7 +105,7 @@ protected void searchResources(AbstractGetResourcesRequest request, TaskId paren listener.delegateFailure((l, response) -> { List docs = new ArrayList<>(); Set foundResourceIds = new HashSet<>(); - long totalHitCount = response.getHits().getTotalHits().value; + long totalHitCount = response.getHits().getTotalHits().value(); for (SearchHit hit : 
response.getHits().getHits()) { try ( XContentParser parser = XContentHelper.createParserNotCompressed( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java index de43f744c307b..4e5f97acacf64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java @@ -145,7 +145,7 @@ default SearchSourceBuilder buildSearch(EvaluationParameters parameters, QueryBu */ default void process(SearchResponse searchResponse) { Objects.requireNonNull(searchResponse); - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { String requiredFieldsString = String.join(", ", getRequiredFields()); throw ExceptionsHelper.badRequestException("No documents found containing all the required fields [{}]", requiredFieldsString); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index 3154fe5999b8e..129619f6976e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -79,19 +79,19 @@ public void onResponse(SearchResponse resp) { } } - if (results.size() > resp.getHits().getTotalHits().value) { + if (results.size() > resp.getHits().getTotalHits().value()) { clearScroll.accept(lastResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + results.size() + "] than expected [" - + resp.getHits().getTotalHits().value + + resp.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " 
+ "memory consumption." ) ); - } else if (results.size() == resp.getHits().getTotalHits().value) { + } else if (results.size() == resp.getHits().getTotalHits().value()) { clearScroll.accept(resp); // Finally, return the list of the entity listener.onResponse(Collections.unmodifiableList(results)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java index 0061870c73cc9..32b12c834dd9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java @@ -55,8 +55,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("took", tookInMillis); builder.startObject("total"); { - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? 
"eq" : "gte"); } builder.endObject(); builder.startArray("profiles"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java index 09a49c53ee1a5..908f58c5f9147 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java @@ -155,17 +155,6 @@ public FieldInfos getFieldInfos() { return fieldInfos; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f == null) { - return null; - } - f = new FieldFilterFields(f); - // we need to check for emptyness, so we can return null: - return f.iterator().hasNext() ? f : null; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -264,11 +253,6 @@ private static int step(CharacterRunAutomaton automaton, String key, int state) return state; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - super.document(docID, new FieldSubsetStoredFieldVisitor(visitor)); - } - @Override protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { return new FieldSubsetStoredFieldsReader(reader); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index c85a648761ca7..5ba5c1fd1218a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -187,7 +187,7 @@ private PermissionEntry(ApplicationPrivilege privilege, Set resourceName } private boolean grants(ApplicationPrivilege other, Automaton resource) { - return matchesPrivilege(other) && Operations.subsetOf(resource, this.resourceAutomaton); + return matchesPrivilege(other) && Automatons.subsetOf(resource, this.resourceAutomaton); } private boolean matchesPrivilege(ApplicationPrivilege other) { @@ -202,7 +202,7 @@ private boolean matchesPrivilege(ApplicationPrivilege other) { } return Operations.isEmpty(privilege.getAutomaton()) == false && Operations.isEmpty(other.getAutomaton()) == false - && Operations.subsetOf(other.getAutomaton(), privilege.getAutomaton()); + && Automatons.subsetOf(other.getAutomaton(), privilege.getAutomaton()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 9c41786f39eeb..4e608281a7858 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RestrictedIndices; @@ -215,7 +214,7 @@ public final boolean check(final String action, final TransportRequest request, @Override public final boolean implies(final PermissionCheck permissionCheck) { if (permissionCheck instanceof 
ActionBasedPermissionCheck) { - return Operations.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) + return Automatons.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) && doImplies((ActionBasedPermissionCheck) permissionCheck); } return false; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index f3c2d9f62e40f..235d7419d2bf0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.Strings; @@ -34,8 +33,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.apache.lucene.util.automaton.Operations.subsetOf; - /** * Stores patterns to fields which access is granted or denied to and maintains an automaton that can be used to check if permission is * allowed for a specific field. 
@@ -175,10 +172,14 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel deniedFieldsAutomaton = Automatons.patterns(deniedFields); } - grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + grantedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); + deniedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); - if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { + if (Automatons.subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { throw new ElasticsearchSecurityException( "Exceptions for field permissions must be a subset of the " + "granted fields but " diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index e1b72cc43b38e..558f8e6f22ac1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -288,7 +288,7 @@ public boolean checkResourcePrivileges( if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { - if (Operations.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { + if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { 
allowedIndexPrivilegesAutomaton = Automatons.unionAndMinimize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) @@ -301,7 +301,7 @@ public boolean checkResourcePrivileges( for (String privilege : checkForPrivileges) { IndexPrivilege indexPrivilege = IndexPrivilege.get(Collections.singleton(privilege)); if (allowedIndexPrivilegesAutomaton != null - && Operations.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { + && Automatons.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { if (resourcePrivilegesMapBuilder != null) { resourcePrivilegesMapBuilder.addResourcePrivilege(forIndexPattern, privilege, Boolean.TRUE); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java index 68e3f11751aac..7434128f03129 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.core.security.support.Automatons; @@ -90,7 +89,7 @@ public static SortedMap sortByAccessLevel(Map subsetCount.put( name, - privileges.values().stream().filter(p2 -> p2 != priv && Operations.subsetOf(priv.automaton, p2.automaton)).count() + privileges.values().stream().filter(p2 -> p2 != priv && Automatons.subsetOf(priv.automaton, p2.automaton)).count() ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index a6347d8b7ec77..201cb4b69e472 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -9,9 +9,10 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.apache.lucene.util.automaton.StatePair; +import org.apache.lucene.util.automaton.Transition; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; @@ -20,6 +21,7 @@ import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -223,7 +225,10 @@ private static Automaton buildAutomaton(String pattern) { ); } String regex = pattern.substring(1, pattern.length() - 1); - return new RegExp(regex).toAutomaton(); + return Operations.determinize( + new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + DEFAULT_DETERMINIZE_WORK_LIMIT + ); } else if (pattern.equals("*")) { return MATCH_ALL; } else { @@ -269,7 +274,7 @@ static Automaton wildcard(String text) { } i += length; } - return concatenate(automata); + return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public static Automaton unionAndMinimize(Collection automata) { @@ -288,7 +293,7 @@ public static Automaton intersectAndMinimize(Automaton a1, Automaton a2) { } private static Automaton minimize(Automaton automaton) { - return 
MinimizationOperations.minimize(automaton, maxDeterminizedStates); + return Operations.determinize(automaton, maxDeterminizedStates); } public static Predicate predicate(String... patterns) { @@ -329,7 +334,8 @@ private static Predicate predicate(Automaton automaton, final String toS } else if (automaton == EMPTY) { return Predicates.never(); } - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates); + automaton = Operations.determinize(automaton, maxDeterminizedStates); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); return new Predicate() { @Override public boolean test(String s) { @@ -368,4 +374,72 @@ static List getPatterns(Automaton automaton) { throw new IllegalArgumentException("recordPatterns is set to false"); } } + + /** + * Returns true if the language of a1 is a subset of the language of a2. + * Both automata must be determinized and must have no dead states. + * + *

    Complexity: quadratic in number of states. + * Copied of Lucene's AutomatonTestUtil + */ + public static boolean subsetOf(Automaton a1, Automaton a2) { + if (a1.isDeterministic() == false) { + throw new IllegalArgumentException("a1 must be deterministic"); + } + if (a2.isDeterministic() == false) { + throw new IllegalArgumentException("a2 must be deterministic"); + } + assert Operations.hasDeadStatesFromInitial(a1) == false; + assert Operations.hasDeadStatesFromInitial(a2) == false; + if (a1.getNumStates() == 0) { + // Empty language is alwyas a subset of any other language + return true; + } else if (a2.getNumStates() == 0) { + return Operations.isEmpty(a1); + } + + // TODO: cutover to iterators instead + Transition[][] transitions1 = a1.getSortedTransitions(); + Transition[][] transitions2 = a2.getSortedTransitions(); + ArrayDeque worklist = new ArrayDeque<>(); + HashSet visited = new HashSet<>(); + StatePair p = new StatePair(0, 0); + worklist.add(p); + visited.add(p); + while (worklist.size() > 0) { + p = worklist.removeFirst(); + if (a1.isAccept(p.s1) && a2.isAccept(p.s2) == false) { + return false; + } + Transition[] t1 = transitions1[p.s1]; + Transition[] t2 = transitions2[p.s2]; + for (int n1 = 0, b2 = 0; n1 < t1.length; n1++) { + while (b2 < t2.length && t2[b2].max < t1[n1].min) { + b2++; + } + int min1 = t1[n1].min, max1 = t1[n1].max; + + for (int n2 = b2; n2 < t2.length && t1[n1].max >= t2[n2].min; n2++) { + if (t2[n2].min > min1) { + return false; + } + if (t2[n2].max < Character.MAX_CODE_POINT) { + min1 = t2[n2].max + 1; + } else { + min1 = Character.MAX_CODE_POINT; + max1 = Character.MIN_CODE_POINT; + } + StatePair q = new StatePair(t1[n1].dest, t2[n2].dest); + if (visited.contains(q) == false) { + worklist.add(q); + visited.add(q); + } + } + if (min1 <= max1) { + return false; + } + } + } + return true; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java index b4952373dfdd3..92568c4f31c18 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -69,6 +70,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index e39ddc170c0a9..54390365c62af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.MatchAllDocsQuery; @@ -400,14 +401,14 @@ public void onFailure(Exception e) { try (Engine.Searcher searcher = restoredShard.acquireSearcher("test")) { 
assertEquals(searcher.getIndexReader().maxDoc(), seqNoStats.getLocalCheckpoint()); TopDocs search = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); search = searcher.search( new MatchAllDocsQuery(), Integer.MAX_VALUE, new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)), false ); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); long previous = -1; for (ScoreDoc doc : search.scoreDocs) { FieldDoc fieldDoc = (FieldDoc) doc; @@ -430,8 +431,9 @@ public void onFailure(Exception e) { assertEquals(original.exists(), restored.exists()); if (original.exists()) { - Document document = original.docIdAndVersion().reader.document(original.docIdAndVersion().docId); - Document restoredDocument = restored.docIdAndVersion().reader.document(restored.docIdAndVersion().docId); + StoredFields storedFields = original.docIdAndVersion().reader.storedFields(); + Document document = storedFields.document(original.docIdAndVersion().docId); + Document restoredDocument = storedFields.document(restored.docIdAndVersion().docId); for (IndexableField field : document) { assertEquals(document.get(field.name()), restoredDocument.get(field.name())); } @@ -470,7 +472,7 @@ public IndexShard reindex(DirectoryReader reader, MappingMetadata mapping) throw for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { rootFieldsVisitor.reset(); - leafReader.document(i, rootFieldsVisitor); + leafReader.storedFields().document(i, rootFieldsVisitor); rootFieldsVisitor.postProcess(targetShard.mapperService()::fieldType); String id = rootFieldsVisitor.id(); BytesReference source = rootFieldsVisitor.source(); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java index 65d057408f8bd..8433f38e40a0c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.IndexSearcher; @@ -108,8 +109,10 @@ DirectoryReader wrapReader(DirectoryReader reader) throws IOException { logger.warn(snapReader + " " + reader); assertEquals(snapReader.maxDoc(), reader.maxDoc()); assertEquals(snapReader.numDocs(), reader.numDocs()); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < snapReader.maxDoc(); i++) { - assertEquals(snapReader.document(i).get("_source"), reader.document(i).get("_source")); + assertEquals(snapStoredFields.document(i).get("_source"), storedFields.document(i).get("_source")); } for (LeafReaderContext ctx : snapReader.leaves()) { if (ctx.reader() instanceof SegmentReader) { @@ -188,12 +191,14 @@ public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 3); assertEquals(snapReader.numDocs(), 2); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < 3; i++) { - assertEquals(snapReader.document(i).get("src"), 
reader.document(i).get("src")); + assertEquals(snapStoredFields.document(i).get("src"), storedFields.document(i).get("src")); } IndexSearcher searcher = newSearcher(snapReader); TopDocs id = searcher.search(new TermQuery(new Term("id", "1")), 10); - assertEquals(0, id.totalHits.value); + assertEquals(0, id.totalHits.value()); } targetDir = newDirectory(targetDir); @@ -321,7 +326,7 @@ public boolean keepFullyDeletedSegment(IOSupplier readerIOSupplier) try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 1); assertEquals(snapReader.numDocs(), 1); - assertEquals("3", snapReader.document(0).getField("rank").stringValue()); + assertEquals("3", snapReader.storedFields().document(0).getField("rank").stringValue()); } try (IndexReader writerReader = DirectoryReader.open(writer)) { assertEquals(writerReader.maxDoc(), 2); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java index bb727204e2651..114ad90354c61 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java @@ -279,7 +279,7 @@ private void assertCorrectLuceneQuery(String name, Query query, List exp booleanClauses.size() ); for (int i = 0; i < booleanClauses.size(); i++) { - Query clauseQuery = booleanClauses.get(i).getQuery(); + Query clauseQuery = booleanClauses.get(i).query(); assertTrue(name + " query " + query + " expected to be a BoostQuery", clauseQuery instanceof BoostQuery); // FeatureQuery is not visible so we check the String representation assertTrue(name + " query " + query + " expected to be a FeatureQuery", clauseQuery.toString().contains("FeatureQuery")); @@ -353,8 +353,8 @@ protected void 
doAssertLuceneQuery(WeightedTokensQueryBuilder queryBuilder, Quer Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java index c40dd00e0e350..6fe271d1b05e3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java @@ -96,25 +96,25 @@ public void testSearch() throws Exception { ); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); TopDocs result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(0)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value2")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(1)); // this doc has been marked as deleted: indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", 
"value3")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(0L)); + assertThat(result.totalHits.value(), equalTo(0L)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value4")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(3)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 560dee9b5843c..db250b16eab16 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; @@ -205,8 +206,9 @@ public void testKnnVectors() throws Exception { FloatVectorValues vectorValues = leafReader.getFloatVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + 
assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -215,7 +217,7 @@ public void testKnnVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getFloatVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -239,8 +241,9 @@ public void testKnnByteVectors() throws Exception { ByteVectorValues vectorValues = leafReader.getByteVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -249,7 +252,7 @@ public void testKnnByteVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getByteVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -274,11 +277,6 @@ public void testStoredFieldsString() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - 
assertEquals("testA", d2.get("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -306,11 +304,6 @@ public void testStoredFieldsBinary() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(new BytesRef("testA"), d2.getBinaryValue("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -338,11 +331,6 @@ public void testStoredFieldsInt() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -370,11 +358,6 @@ public void testStoredFieldsLong() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1L, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -402,11 +385,6 @@ public void testStoredFieldsFloat() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1F, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -434,11 +412,6 @@ public void 
testStoredFieldsDouble() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1D, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -468,7 +441,7 @@ public void testVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - Fields vectors = ir.getTermVectors(0); + Fields vectors = ir.termVectors().get(0); Set seenFields = new HashSet<>(); for (String field : vectors) { seenFields.add(field); @@ -615,7 +588,6 @@ public void testSortedSetDocValues() throws Exception { assertNotNull(dv); assertTrue(dv.advanceExact(0)); assertEquals(0, dv.nextOrd()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd()); assertEquals(new BytesRef("testA"), dv.lookupOrd(0)); assertNull(segmentReader.getSortedSetDocValues("fieldB")); @@ -702,11 +674,6 @@ public void testSourceFilteringIntegration() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals("{\"fieldA\":\"testA\"}", d2.getBinaryValue(SourceFieldMapper.NAME).utf8ToString()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -1201,7 +1168,7 @@ public void testFilterAwayAllVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldB"))); // sees no fields - assertNull(ir.getTermVectors(0)); + assertNull(ir.termVectors().get(0)); TestUtil.checkReader(ir); IOUtils.close(ir, iw, dir); @@ -1229,14 
+1196,9 @@ public void testEmpty() throws Exception { assertNull(segmentReader.terms("foo")); // see no vectors - assertNull(segmentReader.getTermVectors(0)); assertNull(segmentReader.termVectors().get(0)); // see no stored fields - { - Document document = segmentReader.document(0); - assertEquals(0, document.getFields().size()); - } { Document document = segmentReader.storedFields().document(0); assertEquals(0, document.getFields().size()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index df64c4f87410a..4751f66cf548e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -193,7 +193,7 @@ protected IndicesAccessControl getIndicesAccessControl() { int expectedHitCount = valuesHitCount[i]; logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount); - Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(indexSearcher.getSlices())); assertThat(totalHits, equalTo(expectedHitCount)); assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java index 265714ee6ea16..073b3b92a43a5 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -17,6 +16,7 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import java.util.Collection; @@ -83,7 +83,7 @@ public void testPrivilegesForGetCheckPointAction() { public void testRelationshipBetweenPrivileges() { assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("view_index_metadata")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -91,12 +91,12 @@ public void testRelationshipBetweenPrivileges() { ); assertThat( - Operations.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(true) ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create", "create_doc", "index", "delete")).automaton, IndexPrivilege.get(Set.of("write")).automaton ), @@ -104,7 +104,7 @@ public void testRelationshipBetweenPrivileges() { ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create_index", "delete_index")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ 
-122,7 +122,7 @@ public void testCrossClusterReplicationPrivileges() { "indices:admin/seq_no/renew_retention_lease" ).forEach(action -> assertThat(crossClusterReplication.predicate.test(action + randomAlphaOfLengthBetween(0, 8)), is(true))); assertThat( - Operations.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), + Automatons.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), is(true) ); @@ -139,10 +139,10 @@ public void testCrossClusterReplicationPrivileges() { ); assertThat( - Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(false) ); - assertThat(Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); + assertThat(Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 6f3c435eb12f6..a58acf82ea44e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import 
org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; @@ -218,13 +218,13 @@ public void testIndexCollapse() throws Exception { Set name = Sets.newHashSet(first.name().iterator().next(), second.name().iterator().next()); IndexPrivilege index = IndexPrivilege.get(name); - if (Operations.subsetOf(second.getAutomaton(), first.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - } else if (Operations.subsetOf(first.getAutomaton(), second.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + if (Automatons.subsetOf(second.getAutomaton(), first.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + } else if (Automatons.subsetOf(first.getAutomaton(), second.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } else { - assertFalse(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - assertFalse(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java index 0b2e48bd20dfe..94f91f427e19a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.List; -import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; import static 
org.elasticsearch.xpack.core.security.support.Automatons.pattern; import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; import static org.elasticsearch.xpack.core.security.support.Automatons.predicate; @@ -115,12 +114,12 @@ public void testPatternComplexity() { } private void assertMatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertTrue(runAutomaton.run(text)); } private void assertMismatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertFalse(runAutomaton.run(text)); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index d3dcd7ae36f59..65d53d3adabe7 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -125,7 +125,7 @@ public void testRunner() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = searchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -176,7 +176,7 @@ public void testRunner() throws Exception { ), enrichSearchResponse -> { - 
assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -202,7 +202,7 @@ public void testRunnerGeoMatchType() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("location"), is(equalTo("POINT(10.0 10.0)"))); @@ -244,7 +244,7 @@ public void testRunnerGeoMatchType() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -286,7 +286,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("range"), is(equalTo(Map.of("lt", 10, "gt", 1)))); @@ -330,7 +330,7 @@ private 
void testNumberRangeMatchType(String rangeType) throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -376,7 +376,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { new SearchRequest(sourceIndexName).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("subnet"), is(equalTo("10.0.0.0/8"))); @@ -421,7 +421,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -460,7 +460,7 @@ public void testRunnerMultiSource() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -522,7 +522,7 @@ 
public void testRunnerMultiSource() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -564,7 +564,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -633,7 +633,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -688,7 +688,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -749,7 +749,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -943,7 +943,7 @@ public void testRunnerObjectSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -993,7 +993,7 @@ public void testRunnerObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1051,7 +1051,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { assertResponse( client().search(new 
SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1100,7 +1100,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1158,7 +1158,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1209,7 +1209,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1273,7 +1273,7 @@ public void 
testRunnerTwoObjectLevelsSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1329,7 +1329,7 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1395,7 +1395,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1450,7 +1450,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), 
equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1519,7 +1519,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1580,7 +1580,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-19T14:00:00Z")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(0L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); assertResponse( @@ -1590,7 +1590,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E ) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1614,7 +1614,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021/08/20 at 14:00")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, 
equalTo(1L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); // Validate segments @@ -1657,7 +1657,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("data.field1"), is(equalTo("value1"))); @@ -1704,7 +1704,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -1736,7 +1736,7 @@ public void testRunnerWithForceMergeRetry() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -1868,7 +1868,7 @@ protected void afterRefreshEnrichIndex(ActionListener listener) { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -1901,7 +1901,7 @@ public void testRunnerWithEmptySegmentsResponse() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2007,7 +2007,7 @@ public void testRunnerWithShardFailuresInSegmentResponse() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2405,7 +2405,7 @@ public void testRunnerValidatesIndexIntegrity() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); 
assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java index 00f22aca2cb92..8dbc9b0f4f43a 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -64,7 +64,7 @@ public void testExecute() throws Exception { assertThat(response.getResponses().length, equalTo(numSearches)); for (int i = 0; i < numSearches; i++) { assertThat(response.getResponses()[i].isFailure(), is(false)); - assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); assertThat( response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index cd98b43adc159..5e1fde0dfb942 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -1073,7 +1073,7 @@ private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnecto final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) .toList(); - return new 
ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value); + return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value()); } private static ConnectorSearchResult hitToConnector(SearchHit searchHit) { @@ -1115,7 +1115,7 @@ private void isDataIndexNameAlreadyInUse(String indexName, String connectorId, A client.search(searchRequest, new ActionListener<>() { @Override public void onResponse(SearchResponse searchResponse) { - boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value > 0L; + boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value() > 0L; listener.onResponse(indexNameIsInUse); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 9ef895a3a5786..ce6f7f0dbf2b2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -417,7 +417,7 @@ private ConnectorSyncJobsResult mapSearchResponseToConnectorSyncJobsList(SearchR .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) .toList(); - return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value); + return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value()); } private static ConnectorSyncJobSearchResult hitToConnectorSyncJob(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 2eec155ae8ea2..8bf4bbd5716b7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -436,7 +436,7 @@ private static QueryRulesetResult mapSearchResponseToQueryRulesetList(SearchResp final List rulesetResults = Arrays.stream(response.getHits().getHits()) .map(QueryRulesIndexService::hitToQueryRulesetListItem) .toList(); - return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value); + return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value()); } private static QueryRulesetListItem hitToQueryRulesetListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 9e8a8f750b764..30d533aeb9ae5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -416,7 +416,7 @@ private static SearchApplicationResult mapSearchResponse(SearchResponse response final List apps = Arrays.stream(response.getHits().getHits()) .map(SearchApplicationIndexService::hitToSearchApplicationListItem) .toList(); - return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value); + return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value()); } private static SearchApplicationListItem hitToSearchApplicationListItem(SearchHit searchHit) { diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index be1d4c0871ca7..2b7b8b074fa71 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -582,8 +582,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(Fields.HITS); if (totalHits != null) { builder.startObject(Fields.TOTAL); - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); builder.endObject(); } if (events != null) { diff --git a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt index b04d28654f1d5..00c08096fd084 100644 --- a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt +++ b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt @@ -769,7 +769,7 @@ process where command_line regex "^.*?net.exe" regexSingleArgInsensitive process where command_line regex~ "^.*?net.exe" ; -"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":65791,"case_insensitive":true ; regexMultiArg @@ -781,7 +781,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe") regexMultiArgInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":65791,"case_insensitive":true ; 
regexMultiMultiArgVariant @@ -793,7 +793,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\ regexMultiMultiArgVariantInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\system32\\\\net1\\s+") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":65791,"case_insensitive":true ; regexMultiArgWithScript diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java index f437dc5819dcb..4e559f564acb1 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java index 
7cedbc4742138..3e9cbf92727c2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -39,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index 1c9c97a364fc7..9633051781f4a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -120,7 +120,7 @@ static List> docSlices(IndexReader indexReader, i } static List> segmentSlices(List leafContexts) { - IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE); - return Arrays.stream(gs).map(g -> Arrays.stream(g.leaves).map(PartialLeafReaderContext::new).toList()).toList(); + IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE, false); + return Arrays.stream(gs).map(g -> 
Arrays.stream(g.partitions).map(PartialLeafReaderContext::new).toList()).toList(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2e32d20a2365e..0f600958b93b3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; @@ -230,8 +231,9 @@ static final class PerShardCollector { if (sortAndFormats.isEmpty()) { throw new IllegalStateException("sorts must not be disabled in TopN"); } + // We don't use CollectorManager here as we don't retrieve the total hits and sort by score. 
- this.topFieldCollector = TopFieldCollector.create(sortAndFormats.get().sort, limit, 0); + this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, null, 0, false).newCollector(); } LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java index e9063c9597c5f..c92dc75397729 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; /** * A subset of a {@link LeafReaderContext}. @@ -16,6 +17,10 @@ * @param maxDoc one more than the last document */ public record PartialLeafReaderContext(LeafReaderContext leafReaderContext, int minDoc, int maxDoc) { + public PartialLeafReaderContext(IndexSearcher.LeafReaderContextPartition partition) { + this(partition.ctx, partition.minDocId, partition.maxDocId); + } + public PartialLeafReaderContext(LeafReaderContext leafReaderContext) { this(leafReaderContext, 0, leafReaderContext.reader().maxDoc()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java index 6937f1a8c7772..f70cfe1dc8a41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; @@ -89,7 +90,7 @@ public Page getOutput() { continue; } final DocCollector collector = new DocCollector(docsBuilder); - scorer.score(collector, leaf.reader().getLiveDocs()); + scorer.score(collector, leaf.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); int matches = collector.matches; if (segmentsBuilder != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java index 09166f0cff7a8..0af22a357aeca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java @@ -37,7 +37,7 @@ public static EvalOperator.ExpressionEvaluator.Factory toEvaluator( * we couldn't get a nice toDot - so we call UTF32ToUTF8 ourselves. 
*/ Automaton automaton = Operations.determinize(new UTF32ToUTF8().convert(utf32Automaton), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - ByteRunAutomaton run = new ByteRunAutomaton(automaton, true, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + ByteRunAutomaton run = new ByteRunAutomaton(automaton, true); return new AutomataMatchEvaluator.Factory(source, field, run, toDot(automaton)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 7ff09c23a1403..5903d725bf9c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -441,7 +441,7 @@ public NamedExpression visitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePat // use the fast run variant result = new UnresolvedNamePattern( src, - new CharacterRunAutomaton(Operations.concatenate(list)), + new CharacterRunAutomaton(Operations.determinize(Operations.concatenate(list), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)), patternString.toString(), nameString.toString() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java index 214c7b1053359..f6668db52b93b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java @@ -69,14 +69,6 @@ public String toString(String field) { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException 
{ - final ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { @@ -96,12 +88,12 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti * can't do that because we need the check the number of fields. */ if (lfd instanceof LeafNumericFieldData n) { - return scorerSupplier(context, n.getLongValues(), this, boost, scoreMode); + return scorerSupplier(context, n.getLongValues(), boost, scoreMode); } if (lfd instanceof LeafOrdinalsFieldData o) { - return scorerSupplier(context, o.getOrdinalsValues(), this, boost, scoreMode); + return scorerSupplier(context, o.getOrdinalsValues(), boost, scoreMode); } - return scorerSupplier(context, lfd.getBytesValues(), this, boost, scoreMode); + return scorerSupplier(context, lfd.getBytesValues(), boost, scoreMode); } @Override @@ -113,7 +105,6 @@ public boolean isCacheable(LeafReaderContext ctx) { private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedNumericDocValues sortedNumerics, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -122,16 +113,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final PointValues points = context.reader().getPointValues(fieldData.getFieldName()); if (points != null && points.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { - return new PredicateScorerSupplier( - weight, - boost, - scoreMode, - maxDoc, - MULTI_VALUE_MATCH_COST, - sortedNumerics::advanceExact - ); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, sortedNumerics::advanceExact); } } final CheckedIntPredicate predicate = doc -> { @@ -144,13 
+128,12 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedSetDocValues sortedSetDocValues, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -159,10 +142,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final Terms terms = context.reader().terms(fieldData.getFieldName()); if (terms != null && terms.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -181,20 +163,18 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedBinaryDocValues sortedBinaryDocValues, - Weight weight, float boost, ScoreMode scoreMode ) { final int maxDoc = context.reader().maxDoc(); if (FieldData.unwrapSingleton(sortedBinaryDocValues) != null) { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -212,7 +192,7 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } }; } @@ -266,13 +246,11 @@ public int hashCode() { private static class DocIdSetIteratorScorerSupplier extends ScorerSupplier { - private 
final Weight weight; private final float score; private final ScoreMode scoreMode; private final DocIdSetIterator docIdSetIterator; - private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { - this.weight = weight; + private DocIdSetIteratorScorerSupplier(float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { this.score = score; this.scoreMode = scoreMode; this.docIdSetIterator = docIdSetIterator; @@ -280,7 +258,7 @@ private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode sco @Override public Scorer get(long leadCost) { - return new ConstantScoreScorer(weight, score, scoreMode, docIdSetIterator); + return new ConstantScoreScorer(score, scoreMode, docIdSetIterator); } @Override @@ -290,23 +268,13 @@ public long cost() { } private static class PredicateScorerSupplier extends ScorerSupplier { - - private final Weight weight; private final float score; private final ScoreMode scoreMode; private final int maxDoc; private final int matchCost; private final CheckedIntPredicate predicate; - private PredicateScorerSupplier( - Weight weight, - float score, - ScoreMode scoreMode, - int maxDoc, - int matchCost, - CheckedIntPredicate predicate - ) { - this.weight = weight; + private PredicateScorerSupplier(float score, ScoreMode scoreMode, int maxDoc, int matchCost, CheckedIntPredicate predicate) { this.score = score; this.scoreMode = scoreMode; this.maxDoc = maxDoc; @@ -327,7 +295,7 @@ public float matchCost() { return matchCost; } }; - return new ConstantScoreScorer(weight, score, scoreMode, iterator); + return new ConstantScoreScorer(score, scoreMode, iterator); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java index 107c2af11c4f1..04da5d406fbb9 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java @@ -108,7 +108,7 @@ public void testQueries() throws Exception { QueryList queryList = QueryList.termQueryList(uidField, mock(SearchExecutionContext.class), inputTerms, KEYWORD); assertThat(queryList.getPositionCount(), equalTo(6)); assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2"))))); - assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", new BytesRef("c1"), new BytesRef("a2")))); + assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2"))))); assertThat(queryList.getQuery(2), equalTo(new TermQuery(new Term("uid", new BytesRef("z2"))))); assertNull(queryList.getQuery(3)); assertThat(queryList.getQuery(4), equalTo(new TermQuery(new Term("uid", new BytesRef("a3"))))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index 2ba397a3cb3de..95444c9b2423f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -232,7 +232,7 @@ private Object randomValue() { private List docFor(int i, Iterable values) { List fields = new ArrayList<>(); - fields.add(new LongField("i", i)); + fields.add(new LongField("i", i, Field.Store.NO)); fields.add(new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)); switch (fieldType) { case "long", "integer", "short", "byte" -> { @@ -270,7 +270,10 @@ public List> build(RandomIndexWriter iw) throws IOException { List> fieldValues = new 
ArrayList<>(100); for (int i = 0; i < 100; i++) { iw.addDocument( - List.of(new LongField("i", i), new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)) + List.of( + new LongField("i", i, Field.Store.NO), + new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO) + ) ); fieldValues.add(List.of()); } diff --git a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 3623d3671e83f..6d90b0e67ee83 100644 --- a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.graph.test; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; @@ -165,7 +165,7 @@ public void testLargeNumberTermsStartCrawl() { VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); peopleNames.addInclude("john", 1); - for (int i = 0; i < BooleanQuery.getMaxClauseCount() + 1; i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount() + 1; i++) { peopleNames.addInclude("unknown" + i, 1); } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 36e8eaf94c8be..b60ce13e0228c 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ 
b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.ExceptionsHelper; @@ -564,7 +564,7 @@ private static void addBigOrClause(Map> lastHopFindings, Boo for (Entry> entry : lastHopFindings.entrySet()) { numClauses += entry.getValue().size(); } - if (numClauses < BooleanQuery.getMaxClauseCount()) { + if (numClauses < IndexSearcher.getMaxClauseCount()) { // We can afford to build a Boolean OR query with individual // boosts for interesting terms for (Entry> entry : lastHopFindings.entrySet()) { @@ -755,7 +755,7 @@ private double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) { private static void addNormalizedBoosts(BoolQueryBuilder includesContainer, VertexRequest vr) { TermBoost[] termBoosts = vr.includeValues(); - if ((includesContainer.should().size() + termBoosts.length) > BooleanQuery.getMaxClauseCount()) { + if ((includesContainer.should().size() + termBoosts.length) > IndexSearcher.getMaxClauseCount()) { // Too many terms - we need a cheaper form of query to execute this List termValues = new ArrayList<>(); for (TermBoost tb : termBoosts) { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 73c0f6d4c7685..54d83af8f5d95 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -135,7 +135,7 @@ public void testBulkOperations() throws Exception { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 8416d58cb1328..f444719c730f5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -598,7 +598,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -606,7 +606,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a", "b")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -614,7 +614,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("d")), 10 ); - 
assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -622,7 +622,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("z")), 10 ); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index f54ce89183079..b8bcb766b53e1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -199,9 +199,9 @@ private void assertSparseEmbeddingLuceneQuery(Query query) { BooleanQuery innerBooleanQuery = (BooleanQuery) innerQuery; assertThat(innerBooleanQuery.clauses().size(), equalTo(queryTokenCount)); innerBooleanQuery.forEach(c -> { - assertThat(c.getOccur(), equalTo(SHOULD)); - assertThat(c.getQuery(), instanceOf(BoostQuery.class)); - assertThat(((BoostQuery) c.getQuery()).getBoost(), equalTo(TOKEN_WEIGHT)); + assertThat(c.occur(), equalTo(SHOULD)); + assertThat(c.query(), instanceOf(BoostQuery.class)); + assertThat(((BoostQuery) c.query()).getBoost(), equalTo(TOKEN_WEIGHT)); }); } @@ -223,7 +223,7 @@ private Query assertOuterBooleanQuery(Query query) { List outerMustClauses = new ArrayList<>(); List outerFilterClauses = new ArrayList<>(); for (BooleanClause clause : outerBooleanQuery.clauses()) { - BooleanClause.Occur occur = clause.getOccur(); + BooleanClause.Occur occur = clause.occur(); if (occur == MUST) { outerMustClauses.add(clause); } else if (occur == FILTER) { @@ -236,7 +236,7 @@ private Query 
assertOuterBooleanQuery(Query query) { assertThat(outerMustClauses.size(), equalTo(1)); assertThat(outerFilterClauses.size(), equalTo(1)); - return outerMustClauses.get(0).getQuery(); + return outerMustClauses.get(0).query(); } @Override diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java index 081a170aac9f1..bb4464542a422 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java @@ -87,7 +87,7 @@ protected void doExecute(Task task, GetPipelineRequest request, ActionListener { - final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value); + final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value()); final Map pipelineSources = Maps.newMapWithExpectedSize(numHits); final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { @@ -148,14 +148,14 @@ private void handleFilteringSearchResponse( ActionListener listener ) { int numberOfHitsSeenSoFar = numberOfHitsSeenPreviously + searchResponse.getHits().getHits().length; - if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + numberOfHitsSeenSoFar + "] than expected [" - + searchResponse.getHits().getTotalHits().value + + searchResponse.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." 
) @@ -179,7 +179,7 @@ private void handleFilteringSearchResponse( } } - if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onResponse(new GetPipelineResponse(pipelineSources)); } else { diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index c594c9f553164..216f82552353b 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -19,6 +19,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.LevenshteinAutomata; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.logging.DeprecationCategory; @@ -291,7 +292,10 @@ public Query regexpQuery( return new MatchNoDocsQuery(); } - final Automaton automaton = new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates); + final Automaton automaton = Operations.determinize( + new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ); final CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); if (runAutomaton.run(this.value)) { return new MatchAllDocsQuery(); diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java 
b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index e52237f4d507e..3a50cc8143485 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -242,11 +242,8 @@ public int docValueCount() { @Override public long nextOrd() { - if (ordsForThisDoc.hasNext()) { - return ordsForThisDoc.next(); - } else { - return NO_MORE_ORDS; - } + assert ordsForThisDoc.hasNext(); + return ordsForThisDoc.next(); } @Override diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java index c29e4513562fc..04599549cc3cc 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java @@ -77,11 +77,7 @@ private CollectionBasedSortedSetDocValues(List docValues) { @Override public long nextOrd() { - currentOrd++; - if (currentOrd >= docValues.size()) { - return NO_MORE_ORDS; - } - return currentOrd; + return ++currentOrd; } @Override diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 303b94ec655dc..e8fd0da496bbe 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ 
b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -651,7 +651,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio List fields = new ArrayList<>(); if (indexed && hasDocValues) { - fields.add(new LongField(fieldType().name(), numericValue)); + fields.add(new LongField(fieldType().name(), numericValue, Field.Store.NO)); } else if (hasDocValues) { fields.add(new SortedNumericDocValuesField(fieldType().name(), numericValue)); } else if (indexed) { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java index 00532d95574c0..4f42103bc4541 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.Locale; @@ -213,9 +212,9 @@ static CompiledAutomaton prefixAutomaton(String versionPrefix, boolean caseInsen a = Operations.concatenate(a, Automata.makeAnyBinary()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); + a = Operations.determinize(a, 0); - return new CompiledAutomaton(a, null, true, 0, true); + return new CompiledAutomaton(a, false, true, true); } static class EncodedVersion { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java index 
387a49a29dc23..1e5ecf19bdf81 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java @@ -40,11 +40,11 @@ class VersionFieldWildcardQuery extends AutomatonQuery { private static final byte WILDCARD_CHAR = '?'; VersionFieldWildcardQuery(Term term, boolean caseInsensitive) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true); + super(term, toAutomaton(term, caseInsensitive), true); } VersionFieldWildcardQuery(Term term, boolean caseInsensitive, RewriteMethod rewriteMethod) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true, rewriteMethod); + super(term, toAutomaton(term, caseInsensitive), true, rewriteMethod); } private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive) { @@ -114,7 +114,7 @@ private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive if (containsPreReleaseSeparator == false) { automata.add(Operations.optional(Automata.makeChar(VersionEncoder.NO_PRERELEASE_SEPARATOR_BYTE))); } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java index 17e1d70cbb471..01f0fdb256551 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java @@ -47,7 +47,8 @@ public VersionStringDocValuesField(SortedSetDocValues input, String name) { public void setNextDocId(int docId) 
throws IOException { count = 0; if (input.advanceExact(docId)) { - for (long ord = input.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = input.nextOrd()) { + for (int i = 0; i < input.docValueCount(); i++) { + long ord = input.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index b49b4500ce7b7..6bf2917c601ac 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -187,7 +187,8 @@ public Query regexpQuery( matchFlags, DEFAULT_PROVIDER, maxDeterminizedStates, - method == null ? CONSTANT_SCORE_REWRITE : method + method == null ? CONSTANT_SCORE_REWRITE : method, + true ) { @Override diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java index 94d8a144b0bd6..c89d1f8493b6b 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java @@ -117,7 +117,7 @@ public void testPrefixQuery() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A").caseInsensitive(true)), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[0].getSourceAsMap().get("version")); } ); @@ -134,7 
+134,7 @@ public void testSort() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.DESC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("1.3.567#12", hits[0].getSortValues()[0]); assertEquals("1.2.3alpha", hits[1].getSortValues()[0]); @@ -150,7 +150,7 @@ public void testSort() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); var hits = response.getHits().getHits(); assertEquals("1.0.0", hits[0].getSortValues()[0]); assertEquals("1.3.0+build.1234567", hits[1].getSortValues()[0]); @@ -179,7 +179,7 @@ public void testRegexQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "2.*0")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -187,21 +187,21 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "<0-10>.<0-10>.*al.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", 
response.getHits().getHits()[1].getSourceAsMap().get("version")); } ); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "1.[0-9].[0-9].*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("1.3.0+build.1234567", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); // test case sensitivity / insensitivity assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*alpha.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -211,7 +211,7 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -234,7 +234,7 @@ public void testFuzzyQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.fuzzyQuery("version", "2.3.0")), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("2.1.0", 
response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); assertEquals("2.a3.0", response.getHits().getHits()[2].getSourceAsMap().get("version")); @@ -288,7 +288,7 @@ public void testWildcardQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0-alpha.2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -297,7 +297,7 @@ public void testWildcardQuery() throws Exception { private void checkWildcardQuery(String indexName, String query, String... expectedResults) { assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", query)), response -> { - assertEquals(expectedResults.length, response.getHits().getTotalHits().value); + assertEquals(expectedResults.length, response.getHits().getTotalHits().value()); for (int i = 0; i < expectedResults.length; i++) { String expected = expectedResults[i]; Object actual = response.getHits().getHits()[i].getSourceAsMap().get("version"); @@ -321,7 +321,7 @@ public void testStoreMalformed() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addDocValueField("version"), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("1.invalid.0", response.getHits().getAt(0).field("version").getValue()); @@ -359,7 +359,7 @@ public void testStoreMalformed() throws Exception { assertResponse( 
client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("2.2.0", hits[0].getSortValues()[0]); assertEquals("", hits[1].getSortValues()[0]); @@ -437,36 +437,36 @@ public void testMultiValues() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addSort("version", SortOrder.ASC), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("2", response.getHits().getAt(1).getId()); assertEquals("3", response.getHits().getAt(2).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "3.0.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "4.alpha.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2", response.getHits().getAt(0).getId()); }); // range assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.5.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.5.0")), - response -> assertEquals(3, response.getHits().getTotalHits().value) + response -> assertEquals(3, 
response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("5.0.0").to("6.0.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 8c245a4543abe..39519dc7931d0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -345,7 +345,7 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw assertResponse( prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false).setTrackTotalHits(true).setSize(10000), stateDocsResponse -> { - assertThat(stateDocsResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(5L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(5L)); int nonExistingJobDocsCount = 0; List nonExistingJobExampleIds = new ArrayList<>(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 2e096f3262cb6..9864c88d1405c 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -372,7 +372,7 @@ protected long countForecastDocs(String jobId, String forecastId) { .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) .filter(QueryBuilders.termQuery(Forecast.FORECAST_ID.getPreferredName(), forecastId)) ), - searchResponse -> count.set(searchResponse.getHits().getTotalHits().value) + searchResponse -> count.set(searchResponse.getHits().getTotalHits().value()) ); return count.get(); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java index 94bc3150cb12e..5f82d996c87fa 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java @@ -77,7 +77,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t ++numStateRecords; } } - assertThat(stateDocsResponse1.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse1.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } @@ -117,7 +117,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t } } - assertThat(stateDocsResponse2.getHits().getTotalHits().value, equalTo(3L)); + assertThat(stateDocsResponse2.getHits().getTotalHits().value(), equalTo(3L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(2)); @@ -154,7 +154,7 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndTimeAdvanced() throws ++numStateRecords; } } - 
assertThat(stateDocsResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index f5d0b23b437f3..8a6499ec3bb6a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -164,7 +164,7 @@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws + testDocsWithEmptyFeatureImportance + "] test docs with empty feature importance" + " from " - + sourceData.getHits().getTotalHits().value + + sourceData.getHits().getTotalHits().value() + " hits.\n" + badDocuments, trainingDocsWithEmptyFeatureImportance + testDocsWithEmptyFeatureImportance, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index 260a5dea0a3c1..388583f6f8656 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -295,7 +295,7 @@ private Quantiles getQuantiles(String jobId) throws Exception { prepareSearch(".ml-state*").setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))).setSize(1), response -> 
{ SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); try ( XContentParser parser = JsonXContent.jsonXContent.createParser( XContentParserConfiguration.EMPTY, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 8fbad7ccd3877..1505d374dfa08 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -396,7 +396,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti } assertResponse(prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value == docCount) { + if (searchResponse.getHits().getTotalHits().value() == docCount) { long seenCount = SearchResponseUtils.getTotalHitsValue( prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) @@ -404,7 +404,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti logger.debug("We stopped during analysis: [{}] < [{}]", seenCount, docCount); assertThat(seenCount, lessThan((long) docCount)); } else { - logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); + logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value(), docCount); } }); diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java index c15750de3b336..edc851def4468 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java @@ -77,7 +77,7 @@ public void testCountCorrelation() { .setSize(0) .setTrackTotalHits(true), percentilesSearch -> { - long totalHits = percentilesSearch.getHits().getTotalHits().value; + long totalHits = percentilesSearch.getHits().getTotalHits().value(); Percentiles percentiles = percentilesSearch.getAggregations().get("percentiles"); Tuple aggs = buildRangeAggAndSetExpectations( percentiles, diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java index 8fddfa47c377c..139d1b074c7b2 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java @@ -192,7 +192,7 @@ private boolean doesLocalAuditMessageExist(String message) { .setQuery(new MatchPhraseQueryBuilder("message", message)) .get(); try { - return response.getHits().getTotalHits().value > 0; + return response.getHits().getTotalHits().value() > 0; } finally { response.decRef(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 17fe20c5115ff..dfb960794537b 
100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -767,7 +767,7 @@ private static DataCounts getDataCountsFromIndex(String jobId) throws IOExceptio prepareSearch().setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId))), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { setOnce.set(new DataCounts(jobId)); return; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index a47b67e490851..210973f2601d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -189,7 +189,7 @@ private void initChunkedBucketSearcher( ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); if (totalHits > 0) { InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 6aaa1e50f2e8a..d676e6cc9d065 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -415,7 +415,7 @@ private void checkModelIdAgainstTags(String modelId, ActionListener listen ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_MODEL_ID_AND_TAGS_UNIQUE, modelId)) ); @@ -443,7 +443,7 @@ private void checkTagsAgainstModelIds(List tags, ActionListener li ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_TAGS_AND_MODEL_IDS_UNIQUE, tags)) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 2ec460a08caf9..759538b4cdc63 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -433,7 +433,7 @@ private static void checkDestIndexIsEmptyIfExists( TransportSearchAction.TYPE, destEmptySearch, ActionListener.wrap(searchResponse -> { - if (searchResponse.getHits().getTotalHits().value > 0) { + if (searchResponse.getHits().getTotalHits().value() > 0) { listener.onFailure(ExceptionsHelper.badRequestException("dest index [{}] must be empty", destIndex)); } else { listener.onResponse(startContext); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java index f0e03a1e94973..7c41dbd463413 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java @@ -62,7 +62,7 @@ public static DataExtractor.DataSummary getDataSummary(SearchResponse searchResp } else { Long earliestTime = toLongIfFinite((aggregations.get(EARLIEST_TIME)).value()); Long latestTime = toLongIfFinite((aggregations.get(LATEST_TIME)).value()); - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); return new DataExtractor.DataSummary(earliestTime, latestTime, totalHits); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 20da61a3d6910..7829adb395675 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -226,7 +226,7 @@ public void findDatafeedIdsForJobIds(Collection jobIds, ActionListenerdelegateFailureAndWrap((delegate, response) -> { Set datafeedIds = new HashSet<>(); // There cannot be more than one datafeed per job - assert response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { @@ -259,7 +259,7 @@ public void findDatafeedsByJobIds( listener.delegateFailureAndWrap((delegate, response) -> { Map datafeedsByJobId = new HashMap<>(); // There cannot be more than one datafeed per job - assert 
response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { DatafeedConfig.Builder builder = parseLenientlyFromSource(hit.getSourceRef()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index c890ab599c380..315d2249d00cb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -377,7 +377,7 @@ public DataSummary collectDataSummary() { SearchRequestBuilder searchRequestBuilder = buildDataSummarySearchRequestBuilder(); SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); try { - long rows = searchResponse.getHits().getTotalHits().value; + long rows = searchResponse.getHits().getTotalHits().value(); LOGGER.debug(() -> format("[%s] Data summary rows [%s]", context.jobId, rows)); return new DataSummary(rows, organicFeatures.length + processedFeatures.length); } finally { @@ -396,7 +396,7 @@ public void collectDataSummaryAsync(ActionListener dataSummaryActio TransportSearchAction.TYPE, searchRequestBuilder.request(), dataSummaryActionListener.delegateFailureAndWrap( - (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)) + (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value(), numberOfFields)) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index dfcc12d98be41..64cf493028ad1 100644 
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -169,7 +169,7 @@ private InferenceState restoreInferenceState() { ); try { Max maxIncrementalIdAgg = searchResponse.getAggregations().get(DestinationIndex.INCREMENTAL_ID); - long processedTestDocCount = searchResponse.getHits().getTotalHits().value; + long processedTestDocCount = searchResponse.getHits().getTotalHits().value(); Long lastIncrementalId = processedTestDocCount == 0 ? null : (long) maxIncrementalIdAgg.value(); if (lastIncrementalId != null) { LOGGER.debug( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index 482e82f9ec303..fdd4bdd120f6a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -115,7 +115,7 @@ private void searchIfTestDocsExist(ActionListener listener) { ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, - listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value > 0)) + listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value() > 0)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index 3ef2affa5d399..0b3dd573deaae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -70,7 +70,7 @@ private TrainTestSplitter createSingleClassSplitter(Regression regression) { regression.getDependentVariable(), regression.getTrainingPercent(), regression.getRandomizeSeed(), - searchResponse.getHits().getTotalHits().value + searchResponse.getHits().getTotalHits().value() ); } finally { searchResponse.decRef(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index f56c589aea19a..c4396c4f9d2c8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -196,7 +196,7 @@ private void doSearch( numDocsWritten += searchResponse.getHits().getHits().length; boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize - || searchResponse.getHits().getTotalHits().value == numDocsWritten; + || searchResponse.getHits().getTotalHits().value() == numDocsWritten; if (endOfSearch) { successConsumer.accept(Boolean.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index f493c735d87ea..ff5f37427b18f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -1008,7 +1008,7 @@ public void expandIds( ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - long totalHitCount = 
response.getHits().getTotalHits().value + foundResourceIds.size(); + long totalHitCount = response.getHits().getTotalHits().value() + foundResourceIds.size(); Set foundFromDocs = new HashSet<>(); for (SearchHit hit : response.getHits().getHits()) { Map docSource = hit.getSourceAsMap(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 8493513f40bd6..df9a187f59616 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -683,7 +683,7 @@ public void groupExists(String groupId, ActionListener listener) { ML_ORIGIN, searchRequest, ActionListener.wrap( - response -> listener.onResponse(response.getHits().getTotalHits().value > 0), + response -> listener.onResponse(response.getHits().getTotalHits().value() > 0), listener::onFailure ), client::search diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index b9cc1902b7ab6..0f3abe3ab8c20 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -352,7 +352,7 @@ public void deleteJobDocuments( } } SearchResponse searchResponse = item.getResponse(); - if (searchResponse.getHits().getTotalHits().value > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { + if (searchResponse.getHits().getTotalHits().value() > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { needToRunDBQTemp = true; } else { indicesToDelete.add(indexNames.get()[i]); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index f9e4e62e4e3bc..51b3e0b55d75b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -870,7 +870,7 @@ public void buckets( throw QueryPage.emptyQueryPage(Bucket.RESULTS_FIELD); } - QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, Bucket.RESULTS_FIELD); + QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), Bucket.RESULTS_FIELD); if (query.isExpand()) { Iterator bucketsToExpand = buckets.results() @@ -1086,7 +1086,7 @@ public void categoryDefinitions( } QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), CategoryDefinition.RESULTS_FIELD ); handler.accept(result); @@ -1143,7 +1143,7 @@ public void records( } QueryPage queryPage = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), AnomalyRecord.RESULTS_FIELD ); handler.accept(queryPage); @@ -1207,7 +1207,7 @@ public void influencers( } QueryPage result = new QueryPage<>( influencers, - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Influencer.RESULTS_FIELD ); handler.accept(result); @@ -1375,7 +1375,7 @@ private void modelSnapshots( QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), ModelSnapshot.RESULTS_FIELD ); handler.accept(result); @@ -1411,7 +1411,7 @@ public QueryPage modelPlot(String jobId, int from, int size) { } } - return new QueryPage<>(results, 
searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1444,7 +1444,7 @@ public QueryPage categorizerStats(String jobId, int from, int } } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1700,7 +1700,7 @@ public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener(events, response.getHits().getTotalHits().value, ScheduledEvent.RESULTS_FIELD)); + handler.onResponse(new QueryPage<>(events, response.getHits().getTotalHits().value(), ScheduledEvent.RESULTS_FIELD)); } catch (Exception e) { handler.onFailure(e); } @@ -1901,7 +1901,7 @@ public void calendars(CalendarQueryBuilder queryBuilder, ActionListener(calendars, response.getHits().getTotalHits().value, Calendar.RESULTS_FIELD)); + listener.onResponse(new QueryPage<>(calendars, response.getHits().getTotalHits().value(), Calendar.RESULTS_FIELD)); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 886c19a65a4d0..194759c026a30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -168,7 +168,7 @@ private List findForecastsToDelete(SearchResponse searchResponse) List forecastsToDelete = new ArrayList<>(); SearchHits hits = searchResponse.getHits(); - if (hits.getTotalHits().value > MAX_FORECASTS) { + if (hits.getTotalHits().value() > 
MAX_FORECASTS) { LOGGER.info("More than [{}] forecasts were found. This run will only delete [{}] of them", MAX_FORECASTS, MAX_FORECASTS); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java index 86488a647baa1..ef6087f021e9d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java @@ -111,7 +111,7 @@ private SearchResponse initScroll() { ); SearchResponse searchResponse = client.search(searchRequest).actionGet(); - totalHits = searchResponse.getHits().getTotalHits().value; + totalHits = searchResponse.getHits().getTotalHits().value(); scrollId = searchResponse.getScrollId(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java index f63f6e0549179..802bcaf3b342e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java @@ -110,7 +110,7 @@ public Deque next() { SearchResponse searchResponse = doSearch(searchAfterFields()); try { if (trackTotalHits && totalHits.get() == 0) { - totalHits.set(searchResponse.getHits().getTotalHits().value); + totalHits.set(searchResponse.getHits().getTotalHits().value()); } return mapHits(searchResponse); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java index 3d17d8dd23ff6..13cf6d87728a8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java @@ -166,8 +166,8 @@ protected void doAssertLuceneQuery(SparseVectorQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index 8da6fc843614e..00d50e0d0d7bb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -139,8 +139,8 @@ protected void doAssertLuceneQuery(TextExpansionQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git 
a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index daea70abd29e3..7ddaa53a59914 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -149,7 +149,7 @@ public void testMonitoringBulk() throws Exception { assertResponse(client().prepareSearch(".monitoring-" + system.getSystem() + "-" + TEMPLATE_VERSION + "-*"), response -> { // exactly 3 results are expected - assertThat("No monitoring documents yet", response.getHits().getTotalHits().value, equalTo(3L)); + assertThat("No monitoring documents yet", response.getHits().getTotalHits().value(), equalTo(3L)); final List> sources = Arrays.stream(response.getHits().getHits()) .map(SearchHit::getSourceAsMap) @@ -165,7 +165,7 @@ public void testMonitoringBulk() throws Exception { assertCheckedResponse(client().prepareSearch(monitoringIndex), response -> { final SearchHits hits = response.getHits(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat( "Monitoring documents must have the same timestamp", Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(), diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 93e055b58ddc3..d68395ef7656f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -113,7 +113,7 @@ public void testExport() throws Exception { assertResponse( prepareSearch(".monitoring-*"), - response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value)) + response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value())) ); }); @@ -260,7 +260,7 @@ private void checkMonitoringDocs() { DateFormatter dateFormatter = DateFormatter.forPattern(customTimeFormat).withZone(ZoneOffset.UTC); assertResponse(prepareSearch(".monitoring-*").setSize(100), rsp -> { - assertThat(rsp.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(rsp.getHits().getTotalHits().value(), greaterThan(0L)); for (SearchHit hit : rsp.getHits().getHits()) { final Map source = hit.getSourceAsMap(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index d6e15ea25c8e1..d382905c1c9c2 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -293,12 +293,14 @@ private void assertNoWatchesExist() { .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { List invalidWatches = new ArrayList<>(); for (SearchHit hit : response.getHits().getHits()) { invalidWatches.add(ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap())); } - fail("Found [" + 
response.getHits().getTotalHits().value + "] invalid watches when none were expected: " + invalidWatches); + fail( + "Found [" + response.getHits().getTotalHits().value() + "] invalid watches when none were expected: " + invalidWatches + ); } }); } diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java index 803c7f410c41d..71f788727aa23 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java @@ -97,7 +97,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna .getAsVersionId( "version", IndexVersion::fromId, - IndexVersion.fromId(randomBoolean() ? 
5000099 : 6000099) + IndexVersion.fromId(randomFrom(5000099, 6000099, 7000099)) ) ) ) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index d1455eaa2f1c4..18adebb145f98 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -196,7 +196,7 @@ static final OldSegmentInfos readCommit(Directory directory, String segmentFileN long generation = generationFromSegmentsFileName(segmentFileName); // System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); - try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName)) { try { return readCommit(directory, input, generation, minSupportedMajorVersion); } catch (EOFException | NoSuchFileException | FileNotFoundException e) { @@ -305,7 +305,7 @@ private static void parseSegmentInfos(Directory directory, DataInput input, OldS byte[] segmentID = new byte[StringHelper.ID_LENGTH]; input.readBytes(segmentID, 0, segmentID.length); Codec codec = readCodec(input); - SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ); + SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.DEFAULT); info.setCodec(codec); totalDocs += info.maxDoc(); long delGen = CodecUtil.readBELong(input); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 25b4b685ac50f..3ed8fc26ac937 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.lucene.bwc.codecs; -import org.apache.lucene.backward_codecs.lucene70.Lucene70Codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FieldsConsumer; @@ -27,7 +26,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec; import java.io.IOException; import java.util.ArrayList; @@ -101,6 +99,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { false, fieldInfo.getIndexOptions(), fieldInfo.getDocValuesType(), + fieldInfo.docValuesSkipIndexType(), fieldInfo.getDocValuesGen(), fieldInfo.attributes(), fieldInfo.getPointDimensionCount(), @@ -119,9 +118,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { } public static SegmentInfo wrap(SegmentInfo segmentInfo) { - // special handling for Lucene70Codec (which is currently bundled with Lucene) - // Use BWCLucene70Codec instead as that one extends BWCCodec (similar to all other older codecs) - final Codec codec = segmentInfo.getCodec() instanceof Lucene70Codec ? 
new BWCLucene70Codec() : segmentInfo.getCodec(); + final Codec codec = segmentInfo.getCodec(); final SegmentInfo segmentInfo1 = new SegmentInfo( segmentInfo.dir, // Use Version.LATEST instead of original version, otherwise SegmentCommitInfo will bark when processing (N-1 limitation) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java index 5a9b1bb252308..c7abed7d69a59 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java @@ -182,10 +182,7 @@ public Number next() { try { if (nextDocID > values.docID()) { if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - ordCount = 0; - while (values.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { - ordCount++; - } + ordCount = values.docValueCount(); } } int result; @@ -225,6 +222,7 @@ public Iterator iterator() { return new Iterator() { private boolean nextIsSet; + private int currentIndex = 0; private long nextOrd; private void setNext() { @@ -232,17 +230,22 @@ private void setNext() { if (nextIsSet == false) { if (values.docID() == -1) { values.nextDoc(); + currentIndex = 0; } while (true) { if (values.docID() == DocIdSetIterator.NO_MORE_DOCS) { nextOrd = -1; break; } - nextOrd = values.nextOrd(); - if (nextOrd != -1) { - break; + if (currentIndex < values.docValueCount()) { + nextOrd = values.nextOrd(); + currentIndex++; + if (nextOrd != -1) { + break; + } } values.nextDoc(); + currentIndex = 0; } nextIsSet = true; } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java index 21b6818bd5613..80236f3847e12 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java @@ -53,7 +53,7 @@ public int nextDoc() { while (docID < maxDoc) { values.setDocument(docID); ord = values.nextOrd(); - if (ord != NO_MORE_ORDS) { + if (ord != LegacySortedSetDocValues.NO_MORE_ORDS) { return docID; } docID++; @@ -81,7 +81,7 @@ public boolean advanceExact(int target) throws IOException { docID = target; values.setDocument(docID); ord = values.nextOrd(); - return ord != NO_MORE_ORDS; + return ord != LegacySortedSetDocValues.NO_MORE_ORDS; } @Override @@ -92,7 +92,7 @@ public long cost() { @Override public long nextOrd() { long result = ord; - if (result != NO_MORE_ORDS) { + if (result != LegacySortedSetDocValues.NO_MORE_ORDS) { ord = values.nextOrd(); } return result; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java index a567f25869407..007b398624d56 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java @@ -105,14 +105,14 @@ private static int encodedSize(PackedInts.Format format, int packedIntsVersion, for (int bpv = 1; bpv <= 32; ++bpv) { final FormatAndBits formatAndBits = PackedInts.fastestFormatAndBits(BLOCK_SIZE, bpv, acceptableOverheadRatio); - assert formatAndBits.format.isSupported(formatAndBits.bitsPerValue); - assert 
formatAndBits.bitsPerValue <= 32; - encodedSizes[bpv] = encodedSize(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - encoders[bpv] = PackedInts.getEncoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - decoders[bpv] = PackedInts.getDecoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); + assert formatAndBits.format().isSupported(formatAndBits.bitsPerValue()); + assert formatAndBits.bitsPerValue() <= 32; + encodedSizes[bpv] = encodedSize(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + encoders[bpv] = PackedInts.getEncoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + decoders[bpv] = PackedInts.getDecoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); iterations[bpv] = computeIterations(decoders[bpv]); - out.writeVInt(formatAndBits.format.getId() << 5 | (formatAndBits.bitsPerValue - 1)); + out.writeVInt(formatAndBits.format().getId() << 5 | (formatAndBits.bitsPerValue() - 1)); } } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java index 83fcb17449100..06002d2d10dee 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java @@ -23,6 +23,7 @@ import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; 
import org.apache.lucene.index.FieldInfos; @@ -103,6 +104,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm storePayloads, indexOptions, docValuesType, + DocValuesSkipIndexType.NONE, dvGen, attributes, 0, diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java index 09147e821d9fb..607d9903abc87 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -1316,6 +1317,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { } } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return null; + } + private SortedSetDocValues getSortedSetWithAddresses(FieldInfo field) throws IOException { final long valueCount = binaries.get(field.name).count; // we keep the byte[]s and list of ords on disk, these could be large diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java index f3ce3ea0755e1..43203caf571f1 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java @@ -63,14 +63,14 @@ public MetadataOnlyBKDReader(IndexInput metaIn) throws IOException { numLeaves = metaIn.readVInt(); assert numLeaves > 0; - minPackedValue = new byte[config.packedIndexBytesLength]; - maxPackedValue = new byte[config.packedIndexBytesLength]; - - metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength); - metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength); - final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim); - for (int dim = 0; dim < config.numIndexDims; dim++) { - if (comparator.compare(minPackedValue, dim * config.bytesPerDim, maxPackedValue, dim * config.bytesPerDim) > 0) { + minPackedValue = new byte[config.packedIndexBytesLength()]; + maxPackedValue = new byte[config.packedIndexBytesLength()]; + + metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength()); + metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength()); + final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim()); + for (int dim = 0; dim < config.numIndexDims(); dim++) { + if (comparator.compare(minPackedValue, dim * config.bytesPerDim(), maxPackedValue, dim * config.bytesPerDim()) > 0) { throw new CorruptIndexException( "minPackedValue " + new BytesRef(minPackedValue) @@ -104,17 +104,17 @@ public byte[] getMaxPackedValue() { @Override public int getNumDimensions() { - return config.numDims; + return config.numDims(); } @Override public int getNumIndexDimensions() { - return config.numIndexDims; + return config.numIndexDims(); } @Override public int getBytesPerDimension() { - return config.bytesPerDim; + return config.bytesPerDim(); } @Override diff --git 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java index 0e689138acd8f..0100a8bd14635 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java @@ -32,7 +32,7 @@ public class BWCLucene70Codec extends BWCCodec { private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); private final StoredFieldsFormat storedFieldsFormat; - private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene70"); + private final DocValuesFormat defaultDVFormat = new Lucene70DocValuesFormat(); private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { @@ -47,7 +47,11 @@ public PostingsFormat getPostingsFormatForField(String field) { }; public BWCLucene70Codec() { - super("BWCLucene70Codec"); + this("BWCLucene70Codec"); + } + + protected BWCLucene70Codec(String name) { + super(name); storedFieldsFormat = new Lucene50StoredFieldsFormat(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java new file mode 100644 index 0000000000000..75119247cdb13 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java @@ -0,0 +1,327 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BitSetIterator; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.RoaringDocIdSet; + +import java.io.DataInput; +import java.io.IOException; + +/** + * Disk-based implementation of a {@link DocIdSetIterator} which can return the index of the current + * document, i.e. the ordinal of the current document among the list of documents that this iterator + * can return. This is useful to implement sparse doc values by only having to encode values for + * documents that actually have a value. + * + *

    Implementation-wise, this {@link DocIdSetIterator} is inspired of {@link RoaringDocIdSet + * roaring bitmaps} and encodes ranges of {@code 65536} documents independently and picks between 3 + * encodings depending on the density of the range: + * + *

      + *
    • {@code ALL} if the range contains 65536 documents exactly, + *
    • {@code DENSE} if the range contains 4096 documents or more; in that case documents are + * stored in a bit set, + *
    • {@code SPARSE} otherwise, and the lower 16 bits of the doc IDs are stored in a {@link + * DataInput#readShort() short}. + *
    + * + *

    Only ranges that contain at least one value are encoded. + * + *

    This implementation uses 6 bytes per document in the worst-case, which happens in the case + * that all ranges contain exactly one document. + */ +final class IndexedDISI extends DocIdSetIterator { + + static final int MAX_ARRAY_LENGTH = (1 << 12) - 1; + + private static void flush(int block, FixedBitSet buffer, int cardinality, IndexOutput out) throws IOException { + assert block >= 0 && block < 65536; + out.writeShort((short) block); + assert cardinality > 0 && cardinality <= 65536; + out.writeShort((short) (cardinality - 1)); + if (cardinality > MAX_ARRAY_LENGTH) { + if (cardinality != 65536) { // all docs are set + for (long word : buffer.getBits()) { + out.writeLong(word); + } + } + } else { + BitSetIterator it = new BitSetIterator(buffer, cardinality); + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + out.writeShort((short) doc); + } + } + } + + static void writeBitSet(DocIdSetIterator it, IndexOutput out) throws IOException { + int i = 0; + final FixedBitSet buffer = new FixedBitSet(1 << 16); + int prevBlock = -1; + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + final int block = doc >>> 16; + if (prevBlock != -1 && block != prevBlock) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + prevBlock = block; + i = 0; + } + buffer.set(doc & 0xFFFF); + i++; + prevBlock = block; + } + if (i > 0) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + } + // NO_MORE_DOCS is stored explicitly + buffer.set(DocIdSetIterator.NO_MORE_DOCS & 0xFFFF); + flush(DocIdSetIterator.NO_MORE_DOCS >>> 16, buffer, 1, out); + } + + /** The slice that stores the {@link DocIdSetIterator}. 
*/ + private final IndexInput slice; + + private final long cost; + + IndexedDISI(IndexInput in, long offset, long length, long cost) throws IOException { + this(in.slice("docs", offset, length), cost); + } + + // This constructor allows to pass the slice directly in case it helps reuse + // see eg. Lucene70 norms producer's merge instance + IndexedDISI(IndexInput slice, long cost) throws IOException { + this.slice = slice; + this.cost = cost; + } + + private int block = -1; + private long blockEnd; + private int nextBlockIndex = -1; + Method method; + + private int doc = -1; + private int index = -1; + + // SPARSE variables + boolean exists; + + // DENSE variables + private long word; + private int wordIndex = -1; + // number of one bits encountered so far, including those of `word` + private int numberOfOnes; + + // ALL variables + private int gap; + + @Override + public int docID() { + return doc; + } + + @Override + public int advance(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + if (block == targetBlock) { + if (method.advanceWithinBlock(this, target)) { + return doc; + } + readBlockHeader(); + } + boolean found = method.advanceWithinBlock(this, block); + assert found; + return doc; + } + + public boolean advanceExact(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target); + this.doc = target; + return found; + } + + private void advanceBlock(int targetBlock) throws IOException { + do { + slice.seek(blockEnd); + readBlockHeader(); + } while (block < targetBlock); + } + + private void readBlockHeader() throws IOException { + block = Short.toUnsignedInt(slice.readShort()) << 16; + assert block >= 0; + final int numValues = 1 + Short.toUnsignedInt(slice.readShort()); + index = nextBlockIndex; + 
nextBlockIndex = index + numValues; + if (numValues <= MAX_ARRAY_LENGTH) { + method = Method.SPARSE; + blockEnd = slice.getFilePointer() + (numValues << 1); + } else if (numValues == 65536) { + method = Method.ALL; + blockEnd = slice.getFilePointer(); + gap = block - index - 1; + } else { + method = Method.DENSE; + blockEnd = slice.getFilePointer() + (1 << 13); + wordIndex = -1; + numberOfOnes = index + 1; + } + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + public int index() { + return index; + } + + @Override + public long cost() { + return cost; + } + + enum Method { + SPARSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + disi.doc = disi.block | doc; + disi.exists = true; + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + if (target == disi.doc) { + return disi.exists; + } + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + if (doc != targetInBlock) { + disi.index--; + disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES); + break; + } + disi.exists = true; + return true; + } + } + disi.exists = false; + return false; + } + }, + DENSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex 
= targetWordIndex; + + long leftBits = disi.word >>> target; + if (leftBits != 0L) { + disi.doc = target + Long.numberOfTrailingZeros(leftBits); + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return true; + } + + while (++disi.wordIndex < 1024) { + disi.word = disi.slice.readLong(); + if (disi.word != 0) { + disi.index = disi.numberOfOnes; + disi.numberOfOnes += Long.bitCount(disi.word); + disi.doc = disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word); + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex = targetWordIndex; + + long leftBits = disi.word >>> target; + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return (leftBits & 1L) != 0; + } + }, + ALL { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.doc = target; + disi.index = target - disi.gap; + return true; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.index = target - disi.gap; + return true; + } + }; + + /** + * Advance to the first doc from the block that is equal to or greater than {@code target}. + * Return true if there is such a doc and false otherwise. + */ + abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException; + + /** + * Advance the iterator exactly to the position corresponding to the given {@code target} and + * return whether this document exists. 
+ */ + abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java new file mode 100644 index 0000000000000..77de24b53069d --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +public class Lucene70Codec extends BWCLucene70Codec { + + public Lucene70Codec() { + super("Lucene70"); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java new file mode 100644 index 0000000000000..1d35a60235d35 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java @@ -0,0 +1,681 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicWriter; +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.EmptyDocValuesProducer; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.store.ByteBuffersDataOutput; +import org.apache.lucene.store.ByteBuffersIndexOutput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.MathUtil; +import org.apache.lucene.util.StringHelper; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import 
java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE; + +/** writer for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesConsumer extends DocValuesConsumer { + + IndexOutput data, meta; + final int maxDoc; + + /** expert: Creates a new writer */ + Lucene70DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + boolean success = false; + try { + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + data = EndiannessReverserUtil.createOutput(state.directory, dataName, state.context); + CodecUtil.writeIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + meta = EndiannessReverserUtil.createOutput(state.directory, metaName, state.context); + CodecUtil.writeIndexHeader( + meta, + metaCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + maxDoc = state.segmentInfo.maxDoc(); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + @Override + public void close() throws IOException { + boolean success = false; + try { + if (meta != null) { + meta.writeInt(-1); // write EOF marker + CodecUtil.writeFooter(meta); // write checksum + } + if (data != null) { + CodecUtil.writeFooter(data); // write checksum + } + success = true; + } finally { + if (success) { + IOUtils.close(data, meta); + } 
else { + IOUtils.closeWhileHandlingException(data, meta); + } + meta = data = null; + } + } + + @Override + public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.NUMERIC); + + writeValues(field, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + return DocValues.singleton(valuesProducer.getNumeric(field)); + } + }); + } + + private static class MinMaxTracker { + long min, max, numValues, spaceInBits; + + MinMaxTracker() { + reset(); + spaceInBits = 0; + } + + private void reset() { + min = Long.MAX_VALUE; + max = Long.MIN_VALUE; + numValues = 0; + } + + /** Accumulate a new value. */ + void update(long v) { + min = Math.min(min, v); + max = Math.max(max, v); + ++numValues; + } + + /** Update the required space. */ + void finish() { + if (max > min) { + spaceInBits += LegacyDirectWriter.unsignedBitsRequired(max - min) * numValues; + } + } + + /** Update space usage and get ready for accumulating values for the next block. */ + void nextBlock() { + finish(); + reset(); + } + } + + private long[] writeValues(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + int numDocsWithValue = 0; + MinMaxTracker minMax = new MinMaxTracker(); + MinMaxTracker blockMinMax = new MinMaxTracker(); + long gcd = 0; + Set uniqueValues = new HashSet<>(); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + + if (gcd != 1) { + if (v < Long.MIN_VALUE / 2 || v > Long.MAX_VALUE / 2) { + // in that case v - minValue might overflow and make the GCD computation return + // wrong results. 
Since these extreme values are unlikely, we just discard + // GCD computation for them + gcd = 1; + } else if (minMax.numValues != 0) { // minValue needs to be set first + gcd = MathUtil.gcd(gcd, v - minMax.min); + } + } + + minMax.update(v); + blockMinMax.update(v); + if (blockMinMax.numValues == NUMERIC_BLOCK_SIZE) { + blockMinMax.nextBlock(); + } + + if (uniqueValues != null && uniqueValues.add(v) && uniqueValues.size() > 256) { + uniqueValues = null; + } + } + + numDocsWithValue++; + } + + minMax.finish(); + blockMinMax.finish(); + + final long numValues = minMax.numValues; + long min = minMax.min; + final long max = minMax.max; + assert blockMinMax.spaceInBits <= minMax.spaceInBits; + + if (numDocsWithValue == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithValue == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedNumeric(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeLong(numValues); + final int numBitsPerValue; + boolean doBlocks = false; + Map encode = null; + if (min >= max) { + numBitsPerValue = 0; + meta.writeInt(-1); + } else { + if (uniqueValues != null + && uniqueValues.size() > 1 + && LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1) < LegacyDirectWriter.unsignedBitsRequired( + (max - min) / gcd + )) { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1); + final Long[] sortedUniqueValues = uniqueValues.toArray(new Long[0]); + Arrays.sort(sortedUniqueValues); + meta.writeInt(sortedUniqueValues.length); + for (Long v : sortedUniqueValues) { + meta.writeLong(v); + } + encode = new HashMap<>(); + for (int i = 0; i < sortedUniqueValues.length; ++i) { + encode.put(sortedUniqueValues[i], i); + } + min = 0; + gcd = 1; + } else { + uniqueValues = null; + // we do blocks if that appears to save 10+% 
storage + doBlocks = minMax.spaceInBits > 0 && (double) blockMinMax.spaceInBits / minMax.spaceInBits <= 0.9; + if (doBlocks) { + numBitsPerValue = 0xFF; + meta.writeInt(-2 - NUMERIC_BLOCK_SHIFT); + } else { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired((max - min) / gcd); + if (gcd == 1 + && min > 0 + && LegacyDirectWriter.unsignedBitsRequired(max) == LegacyDirectWriter.unsignedBitsRequired(max - min)) { + min = 0; + } + meta.writeInt(-1); + } + } + } + + meta.writeByte((byte) numBitsPerValue); + meta.writeLong(min); + meta.writeLong(gcd); + long startOffset = data.getFilePointer(); + meta.writeLong(startOffset); + if (doBlocks) { + writeValuesMultipleBlocks(valuesProducer.getSortedNumeric(field), gcd); + } else if (numBitsPerValue != 0) { + writeValuesSingleBlock(valuesProducer.getSortedNumeric(field), numValues, numBitsPerValue, min, gcd, encode); + } + meta.writeLong(data.getFilePointer() - startOffset); + + return new long[] { numDocsWithValue, numValues }; + } + + private void writeValuesSingleBlock( + SortedNumericDocValues values, + long numValues, + int numBitsPerValue, + long min, + long gcd, + Map encode + ) throws IOException { + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numValues, numBitsPerValue); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + if (encode == null) { + writer.add((v - min) / gcd); + } else { + writer.add(encode.get(v)); + } + } + } + writer.finish(); + } + + private void writeValuesMultipleBlocks(SortedNumericDocValues values, long gcd) throws IOException { + final long[] buffer = new long[NUMERIC_BLOCK_SIZE]; + final ByteBuffersDataOutput encodeBuffer = ByteBuffersDataOutput.newResettableInstance(); + int upTo = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = 
values.docValueCount(); i < count; ++i) { + buffer[upTo++] = values.nextValue(); + if (upTo == NUMERIC_BLOCK_SIZE) { + writeBlock(buffer, NUMERIC_BLOCK_SIZE, gcd, encodeBuffer); + upTo = 0; + } + } + } + if (upTo > 0) { + writeBlock(buffer, upTo, gcd, encodeBuffer); + } + } + + private void writeBlock(long[] values, int length, long gcd, ByteBuffersDataOutput buffer) throws IOException { + assert length > 0; + long min = values[0]; + long max = values[0]; + for (int i = 1; i < length; ++i) { + final long v = values[i]; + assert Math.floorMod(values[i] - min, gcd) == 0; + min = Math.min(min, v); + max = Math.max(max, v); + } + if (min == max) { + data.writeByte((byte) 0); + data.writeLong(min); + } else { + final int bitsPerValue = LegacyDirectWriter.unsignedBitsRequired(max - min); + buffer.reset(); + assert buffer.size() == 0; + final LegacyDirectWriter w = LegacyDirectWriter.getInstance(buffer, length, bitsPerValue); + for (int i = 0; i < length; ++i) { + w.add((values[i] - min) / gcd); + } + w.finish(); + data.writeByte((byte) bitsPerValue); + data.writeLong(min); + data.writeInt(Math.toIntExact(buffer.size())); + buffer.copyTo(data); + } + } + + @Override + public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.BINARY); + + BinaryDocValues values = valuesProducer.getBinary(field); + long start = data.getFilePointer(); + meta.writeLong(start); + int numDocsWithField = 0; + int minLength = Integer.MAX_VALUE; + int maxLength = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + BytesRef v = values.binaryValue(); + int length = v.length; + data.writeBytes(v.bytes, v.offset, v.length); + minLength = Math.min(length, minLength); + maxLength = Math.max(length, maxLength); + } + assert numDocsWithField <= maxDoc; + meta.writeLong(data.getFilePointer() - start); + + if 
(numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getBinary(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + meta.writeInt(minLength); + meta.writeInt(maxLength); + if (maxLength > minLength) { + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + writer.add(addr); + values = valuesProducer.getBinary(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.binaryValue().length; + writer.add(addr); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED); + doAddSortedField(field, valuesProducer); + } + + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedDocValues values = valuesProducer.getSorted(field); + int numDocsWithField = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + } + + if (numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSorted(field); + IndexedDISI.writeBitSet(values, data); + 
meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + if (values.getValueCount() <= 1) { + meta.writeByte((byte) 0); + meta.writeLong(0L); + meta.writeLong(0L); + } else { + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numDocsWithField, numberOfBitsPerOrd); + values = valuesProducer.getSorted(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + writer.add(values.ordValue()); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + + addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); + } + + private void addTermsDict(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeVLong(size); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT); + + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + long numBlocks = (size + Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) >>> Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT; + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + BytesRefBuilder previous = new BytesRefBuilder(); + long ord = 0; + long start = data.getFilePointer(); + int maxLength = 0; + TermsEnum iterator = values.termsEnum(); + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) == 0) { + writer.add(data.getFilePointer() - start); + data.writeVInt(term.length); + data.writeBytes(term.bytes, term.offset, 
term.length); + } else { + final int prefixLength = StringHelper.bytesDifference(previous.get(), term); + final int suffixLength = term.length - prefixLength; + assert suffixLength > 0; // terms are unique + + data.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4))); + if (prefixLength >= 15) { + data.writeVInt(prefixLength - 15); + } + if (suffixLength >= 16) { + data.writeVInt(suffixLength - 16); + } + data.writeBytes(term.bytes, term.offset + prefixLength, term.length - prefixLength); + } + maxLength = Math.max(maxLength, term.length); + previous.copyBytes(term); + ++ord; + } + writer.finish(); + meta.writeInt(maxLength); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + + // Now write the reverse terms index + writeTermsIndex(values); + } + + private void writeTermsIndex(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + long start = data.getFilePointer(); + + long numBlocks = 1L + ((size + Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) + >>> Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + TermsEnum iterator = values.termsEnum(); + BytesRefBuilder previous = new BytesRefBuilder(); + long offset = 0; + long ord = 0; + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == 0) { + writer.add(offset); + final int 
sortKeyLength; + if (ord == 0) { + // no previous term: no bytes to write + sortKeyLength = 0; + } else { + sortKeyLength = StringHelper.sortKeyLength(previous.get(), term); + } + offset += sortKeyLength; + data.writeBytes(term.bytes, term.offset, sortKeyLength); + } else if ((ord + & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) { + previous.copyBytes(term); + } + ++ord; + } + writer.add(offset); + writer.finish(); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + } + + @Override + public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED_NUMERIC); + + long[] stats = writeValues(field, valuesProducer); + int numDocsWithField = Math.toIntExact(stats[0]); + long numValues = stats[1]; + assert numValues >= numDocsWithField; + + meta.writeInt(numDocsWithField); + if (numValues > numDocsWithField) { + long start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1L, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + 
meta.writeByte(Lucene70DocValuesFormat.SORTED_SET); + + SortedSetDocValues values = valuesProducer.getSortedSet(field); + int numDocsWithField = 0; + long numOrds = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + numOrds += values.docValueCount(); + } + + if (numDocsWithField == numOrds) { + meta.writeByte((byte) 0); + doAddSortedField(field, new EmptyDocValuesProducer() { + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); + } + }); + return; + } + meta.writeByte((byte) 1); + + assert numDocsWithField != 0; + if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedSet(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numOrds, numberOfBitsPerOrd); + values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0; i < values.docValueCount(); i++) { + writer.add(values.nextOrd()); + } + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + + meta.writeInt(numDocsWithField); + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + 
values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + values.nextOrd(); + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + + addTermsDict(values); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java new file mode 100644 index 0000000000000..76fce4cd15c93 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java @@ -0,0 +1,171 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.SmallFloat; + +import java.io.IOException; + +/** + * Lucene 7.0 DocValues format. + * + *

    Documents that have a value for the field are encoded in a way that it is always possible to + * know the ordinal of the current document in the set of documents that have a value. For instance, + * say the set of documents that have a value for the field is {1, 5, 6, 11}. When the + * iterator is on 6, it knows that this is the 3rd item of the set. This way, values + * can be stored densely and accessed based on their index at search time. If all documents in a + * segment have a value for the field, the index is the same as the doc ID, so this case is encoded + * implicitly and is very fast at query time. On the other hand if some documents are missing a + * value for the field then the set of documents that have a value is encoded into blocks. All doc + * IDs that share the same upper 16 bits are encoded into the same block with the following + * strategies: + * + *

      + *
    • SPARSE: This strategy is used when a block contains at most 4095 documents. The lower 16 + * bits of doc IDs are stored as {@link DataOutput#writeShort(short) shorts} while the upper + * 16 bits are given by the block ID. + *
    • DENSE: This strategy is used when a block contains between 4096 and 65535 documents. The + * lower bits of doc IDs are stored in a bit set. Advancing is performed using {@link + * Long#numberOfTrailingZeros(long) ntz} operations while the index is computed by + * accumulating the {@link Long#bitCount(long) bit counts} of the visited longs. + *
    • ALL: This strategy is used when a block contains exactly 65536 documents, meaning that the + * block is full. In that case doc IDs do not need to be stored explicitly. This is typically + * faster than both SPARSE and DENSE which is a reason why it is preferable to have all + * documents that have a value for a field using contiguous doc IDs, for instance by using + * {@link IndexWriterConfig#setIndexSort(org.apache.lucene.search.Sort) index sorting}. + *
    + * + *

    Then the five per-document value types (Numeric,Binary,Sorted,SortedSet,SortedNumeric) are + * encoded using the following strategies: + * + *

    {@link DocValuesType#NUMERIC NUMERIC}: + * + *

      + *
    • Delta-compressed: per-document integers written as deltas from the minimum value, + * compressed with bitpacking. For more information, see {@link LegacyDirectWriter}. + *
    • Table-compressed: when the number of unique values is very small (< 256), and when there + * are unused "gaps" in the range of values used (such as {@link SmallFloat}), a lookup table + * is written instead. Each per-document entry is instead the ordinal to this table, and those + * ordinals are compressed with bitpacking ({@link LegacyDirectWriter}). + *
    • GCD-compressed: when all numbers share a common divisor, such as dates, the greatest common + * denominator (GCD) is computed, and quotients are stored using Delta-compressed Numerics. + *
    • Monotonic-compressed: when all numbers are monotonically increasing offsets, they are + * written as blocks of bitpacked integers, encoding the deviation from the expected delta. + *
    • Const-compressed: when there is only one possible value, no per-document data is needed and + * this value is encoded alone. + *
    + * + *

    {@link DocValuesType#BINARY BINARY}: + * + *

      + *
    • Fixed-width Binary: one large concatenated byte[] is written, along with the fixed length. + * Each document's value can be addressed directly with multiplication ({@code docID * + * length}). + *
    • Variable-width Binary: one large concatenated byte[] is written, along with end addresses + * for each document. The addresses are written as Monotonic-compressed numerics. + *
    • Prefix-compressed Binary: values are written in chunks of 16, with the first value written + * completely and other values sharing prefixes. chunk addresses are written as + * Monotonic-compressed numerics. A reverse lookup index is written from a portion of every + * 1024th term. + *
    + * + *

    {@link DocValuesType#SORTED SORTED}: + * + *

      + *
    • Sorted: a mapping of ordinals to deduplicated terms is written as Prefix-compressed Binary, + * along with the per-document ordinals written using one of the numeric strategies above. + *
    + * + *

    {@link DocValuesType#SORTED_SET SORTED_SET}: + * + *

      + *
    • Single: if all documents have 0 or 1 value, then data are written like SORTED. + *
    • SortedSet: a mapping of ordinals to deduplicated terms is written as Binary, an ordinal + * list and per-document index into this list are written using the numeric strategies above. + *
    + * + *

    {@link DocValuesType#SORTED_NUMERIC SORTED_NUMERIC}: + * + *

      + *
    • Single: if all documents have 0 or 1 value, then data are written like NUMERIC. + *
    • SortedNumeric: a value list and per-document index into this list are written using the + * numeric strategies above. + *
    + * + *

    Files: + * + *

      + *
    1. .dvd: DocValues data + *
    2. .dvm: DocValues metadata + *
    + */ +public final class Lucene70DocValuesFormat extends DocValuesFormat { + + /** Sole Constructor */ + public Lucene70DocValuesFormat() { + super("Lucene70"); + } + + @Override + public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { + return new Lucene70DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + @Override + public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { + return new Lucene70DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + static final String DATA_CODEC = "Lucene70DocValuesData"; + static final String DATA_EXTENSION = "dvd"; + static final String META_CODEC = "Lucene70DocValuesMetadata"; + static final String META_EXTENSION = "dvm"; + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + + // indicates docvalues type + static final byte NUMERIC = 0; + static final byte BINARY = 1; + static final byte SORTED = 2; + static final byte SORTED_SET = 3; + static final byte SORTED_NUMERIC = 4; + + static final int DIRECT_MONOTONIC_BLOCK_SHIFT = 16; + + static final int NUMERIC_BLOCK_SHIFT = 14; + static final int NUMERIC_BLOCK_SIZE = 1 << NUMERIC_BLOCK_SHIFT; + + static final int TERMS_DICT_BLOCK_SHIFT = 4; + static final int TERMS_DICT_BLOCK_SIZE = 1 << TERMS_DICT_BLOCK_SHIFT; + static final int TERMS_DICT_BLOCK_MASK = TERMS_DICT_BLOCK_SIZE - 1; + + static final int TERMS_DICT_REVERSE_INDEX_SHIFT = 10; + static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; + static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java new file mode 100644 index 
0000000000000..5164a67c428b3 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java @@ -0,0 +1,1461 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicReader; +import org.apache.lucene.backward_codecs.packed.LegacyDirectReader; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BaseTermsEnum; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.ImpactsEnum; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.index.TermsEnum.SeekStatus; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LongValues; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +/** reader for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesProducer extends DocValuesProducer { + private final Map numerics = new HashMap<>(); + private final Map binaries = new HashMap<>(); + private final Map sorted = new HashMap<>(); + private final Map sortedSets = new HashMap<>(); + private final Map sortedNumerics = new HashMap<>(); + private final IndexInput data; + private final int maxDoc; + + static 
final long NO_MORE_ORDS = -1; + + /** expert: instantiates a new reader */ + Lucene70DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + this.maxDoc = state.segmentInfo.maxDoc(); + + int version = -1; + + // read in the entries from the metadata file. + try (ChecksumIndexInput in = EndiannessReverserUtil.openChecksumInput(state.directory, metaName, state.context)) { + Throwable priorE = null; + try { + version = CodecUtil.checkIndexHeader( + in, + metaCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + readFields(in, state.fieldInfos); + } catch (Throwable exception) { + priorE = exception; + } finally { + CodecUtil.checkFooter(in, priorE); + } + } + + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + this.data = EndiannessReverserUtil.openInput(state.directory, dataName, state.context); + boolean success = false; + try { + final int version2 = CodecUtil.checkIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + if (version != version2) { + throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data); + } + + // NOTE: data file is too costly to verify checksum against all the bytes on open, + // but for now we at least verify proper structure of the checksum footer: which looks + // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption + // such as file truncation. 
+ CodecUtil.retrieveChecksum(data); + + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this.data); + } + } + } + + private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException { + for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) { + FieldInfo info = infos.fieldInfo(fieldNumber); + if (info == null) { + throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); + } + byte type = meta.readByte(); + if (type == Lucene70DocValuesFormat.NUMERIC) { + numerics.put(info.name, readNumeric(meta)); + } else if (type == Lucene70DocValuesFormat.BINARY) { + binaries.put(info.name, readBinary(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED) { + sorted.put(info.name, readSorted(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_SET) { + sortedSets.put(info.name, readSortedSet(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_NUMERIC) { + sortedNumerics.put(info.name, readSortedNumeric(meta)); + } else { + throw new CorruptIndexException("invalid type: " + type, meta); + } + } + } + + private NumericEntry readNumeric(ChecksumIndexInput meta) throws IOException { + NumericEntry entry = new NumericEntry(); + readNumeric(meta, entry); + return entry; + } + + private void readNumeric(ChecksumIndexInput meta, NumericEntry entry) throws IOException { + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numValues = meta.readLong(); + int tableSize = meta.readInt(); + if (tableSize > 256) { + throw new CorruptIndexException("invalid table size: " + tableSize, meta); + } + if (tableSize >= 0) { + entry.table = new long[tableSize]; + for (int i = 0; i < tableSize; ++i) { + entry.table[i] = meta.readLong(); + } + } + if (tableSize < -1) { + entry.blockShift = -2 - tableSize; + } else { + entry.blockShift = -1; + } + entry.bitsPerValue = meta.readByte(); + entry.minValue = 
meta.readLong(); + entry.gcd = meta.readLong(); + entry.valuesOffset = meta.readLong(); + entry.valuesLength = meta.readLong(); + } + + private BinaryEntry readBinary(ChecksumIndexInput meta) throws IOException { + BinaryEntry entry = new BinaryEntry(); + entry.dataOffset = meta.readLong(); + entry.dataLength = meta.readLong(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.minLength = meta.readInt(); + entry.maxLength = meta.readInt(); + if (entry.minLength < entry.maxLength) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1L, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + private SortedEntry readSorted(ChecksumIndexInput meta) throws IOException { + SortedEntry entry = new SortedEntry(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private SortedSetEntry readSortedSet(ChecksumIndexInput meta) throws IOException { + SortedSetEntry entry = new SortedSetEntry(); + byte multiValued = meta.readByte(); + switch (multiValued) { + case 0: // singlevalued + entry.singleValueEntry = readSorted(meta); + return entry; + case 1: // multivalued + break; + default: + throw new CorruptIndexException("Invalid multiValued flag: " + multiValued, meta); + } + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.addressesOffset = meta.readLong(); + final int 
blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private static void readTermDict(ChecksumIndexInput meta, TermsDictEntry entry) throws IOException { + entry.termsDictSize = meta.readVLong(); + entry.termsDictBlockShift = meta.readInt(); + final int blockShift = meta.readInt(); + final long addressesSize = (entry.termsDictSize + (1L << entry.termsDictBlockShift) - 1) >>> entry.termsDictBlockShift; + entry.termsAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, addressesSize, blockShift); + entry.maxTermLength = meta.readInt(); + entry.termsDataOffset = meta.readLong(); + entry.termsDataLength = meta.readLong(); + entry.termsAddressesOffset = meta.readLong(); + entry.termsAddressesLength = meta.readLong(); + entry.termsDictIndexShift = meta.readInt(); + final long indexSize = (entry.termsDictSize + (1L << entry.termsDictIndexShift) - 1) >>> entry.termsDictIndexShift; + entry.termsIndexAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, 1 + indexSize, blockShift); + entry.termsIndexOffset = meta.readLong(); + entry.termsIndexLength = meta.readLong(); + entry.termsIndexAddressesOffset = meta.readLong(); + entry.termsIndexAddressesLength = meta.readLong(); + } + + private SortedNumericEntry readSortedNumeric(ChecksumIndexInput meta) throws IOException { + SortedNumericEntry entry = new SortedNumericEntry(); + readNumeric(meta, entry); + entry.numDocsWithField = meta.readInt(); + if (entry.numDocsWithField != entry.numValues) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + @Override + public void close() throws IOException { + data.close(); + } + + private 
static class NumericEntry { + long[] table; + int blockShift; + byte bitsPerValue; + long docsWithFieldOffset; + long docsWithFieldLength; + long numValues; + long minValue; + long gcd; + long valuesOffset; + long valuesLength; + } + + private static class BinaryEntry { + long dataOffset; + long dataLength; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + int minLength; + int maxLength; + long addressesOffset; + long addressesLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + } + + private static class TermsDictEntry { + long termsDictSize; + int termsDictBlockShift; + LegacyDirectMonotonicReader.Meta termsAddressesMeta; + int maxTermLength; + long termsDataOffset; + long termsDataLength; + long termsAddressesOffset; + long termsAddressesLength; + int termsDictIndexShift; + LegacyDirectMonotonicReader.Meta termsIndexAddressesMeta; + long termsIndexOffset; + long termsIndexLength; + long termsIndexAddressesOffset; + long termsIndexAddressesLength; + } + + private static class SortedEntry extends TermsDictEntry { + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + } + + private static class SortedSetEntry extends TermsDictEntry { + SortedEntry singleValueEntry; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + private static class SortedNumericEntry extends NumericEntry { + int numDocsWithField; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + @Override + public NumericDocValues getNumeric(FieldInfo field) throws IOException { + NumericEntry entry = numerics.get(field.name); + return getNumeric(entry); + } + + private abstract static class DenseNumericDocValues extends NumericDocValues { + + 
final int maxDoc; + int doc = -1; + + DenseNumericDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public long cost() { + return maxDoc; + } + } + + private abstract static class SparseNumericDocValues extends NumericDocValues { + + final IndexedDISI disi; + + SparseNumericDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + } + + private NumericDocValues getNumeric(NumericEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + // empty + return DocValues.emptyNumeric(); + } else if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.bitsPerValue == 0) { + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // dense but split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new DenseNumericDocValues(maxDoc) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int block = doc >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(doc & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(doc)]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return mul * values.get(doc) + delta; + } + }; + } + } + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues); + if (entry.bitsPerValue == 0) { + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // sparse and split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new SparseNumericDocValues(disi) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int index = disi.index(); + final int block = index >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(disi.index())]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return mul * values.get(disi.index()) + delta; + } + }; + } + } + } + } + } + + private LongValues getNumericValues(NumericEntry entry) throws IOException { + if (entry.bitsPerValue == 0) { + return new LongValues() { + @Override + public long get(long index) { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + final int shift = entry.blockShift; + final long mul = entry.gcd; + final long mask = (1L << shift) - 1; + return new LongValues() { + long block = -1; + long delta; + long offset; + long blockEndOffset; + LongValues values; + + @Override + public long get(long index) { + final long block = index >>> shift; + if 
(this.block != block) { + assert block > this.block : "Reading backwards is illegal: " + this.block + " < " + block; + int bitsPerValue; + do { + offset = blockEndOffset; + try { + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + } catch (IOException e) { + throw new RuntimeException(e); + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 ? LongValues.ZEROES : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new LongValues() { + @Override + public long get(long index) { + return table[(int) values.get(index)]; + } + }; + } else if (entry.gcd != 1) { + final long gcd = entry.gcd; + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) * gcd + minValue; + } + }; + } else if (entry.minValue != 0) { + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) + minValue; + } + }; + } else { + return values; + } + } + } + } + + private abstract static class DenseBinaryDocValues extends BinaryDocValues { + + final int maxDoc; + int doc = -1; + + DenseBinaryDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = 
NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + doc = target; + return true; + } + } + + private abstract static class SparseBinaryDocValues extends BinaryDocValues { + + final IndexedDISI disi; + + SparseBinaryDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + } + + @Override + public BinaryDocValues getBinary(FieldInfo field) throws IOException { + BinaryEntry entry = binaries.get(field.name); + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptyBinary(); + } + + final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength); + + if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) doc * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + long 
startOffset = addresses.get(doc); + bytes.length = (int) (addresses.get(doc + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) disi.index() * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + final int index = disi.index(); + long startOffset = addresses.get(index); + bytes.length = (int) (addresses.get(index + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } + } + + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + SortedEntry entry = sorted.get(field.name); + return getSorted(entry); + } + + private SortedDocValues getSorted(SortedEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptySorted(); + } + + final LongValues ords; + if (entry.bitsPerValue == 0) { + ords = new LongValues() { + @Override + public long get(long index) { + return 0L; + } + }; + } else { + final RandomAccessInput 
slice = data.randomAccessSlice(entry.ordsOffset, entry.ordsLength); + ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + } + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedDocValues(entry, data) { + + int doc = -1; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public int ordValue() { + return (int) ords.get(doc); + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedDocValues(entry, data) { + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int ordValue() { + return (int) ords.get(disi.index()); + } + }; + } + } + + private abstract static class BaseSortedDocValues extends SortedDocValues { + + final SortedEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedDocValues(SortedEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public int getValueCount() { + return Math.toIntExact(entry.termsDictSize); + } + + @Override + public BytesRef lookupOrd(int ord) throws 
IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public int lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return Math.toIntExact(termsEnum.ord()); + case NOT_FOUND: + case END: + default: + return Math.toIntExact(-1L - termsEnum.ord()); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private abstract static class BaseSortedSetDocValues extends SortedSetDocValues { + + final SortedSetEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedSetDocValues(SortedSetEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public long getValueCount() { + return entry.termsDictSize; + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public long lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return termsEnum.ord(); + case NOT_FOUND: + case END: + default: + return -1L - termsEnum.ord(); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private static class TermsDict extends BaseTermsEnum { + + final TermsDictEntry entry; + final LongValues blockAddresses; + final IndexInput bytes; + final long blockMask; + final LongValues indexAddresses; + final IndexInput indexBytes; + final BytesRef term; + long ord = -1; + + TermsDict(TermsDictEntry entry, IndexInput data) throws IOException { + this.entry = entry; + RandomAccessInput addressesSlice = data.randomAccessSlice(entry.termsAddressesOffset, entry.termsAddressesLength); + blockAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsAddressesMeta, addressesSlice); + 
bytes = data.slice("terms", entry.termsDataOffset, entry.termsDataLength); + blockMask = (1L << entry.termsDictBlockShift) - 1; + RandomAccessInput indexAddressesSlice = data.randomAccessSlice( + entry.termsIndexAddressesOffset, + entry.termsIndexAddressesLength + ); + indexAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); + indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + term = new BytesRef(entry.maxTermLength); + } + + @Override + public BytesRef next() throws IOException { + if (++ord >= entry.termsDictSize) { + return null; + } + if ((ord & blockMask) == 0L) { + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + } else { + final int token = Byte.toUnsignedInt(bytes.readByte()); + int prefixLength = token & 0x0F; + int suffixLength = 1 + (token >>> 4); + if (prefixLength == 15) { + prefixLength += bytes.readVInt(); + } + if (suffixLength == 16) { + suffixLength += bytes.readVInt(); + } + term.length = prefixLength + suffixLength; + bytes.readBytes(term.bytes, prefixLength, suffixLength); + } + return term; + } + + @Override + public void seekExact(long ord) throws IOException { + if (ord < 0 || ord >= entry.termsDictSize) { + throw new IndexOutOfBoundsException(); + } + final long blockIndex = ord >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(blockIndex); + bytes.seek(blockAddress); + this.ord = (blockIndex << entry.termsDictBlockShift) - 1; + do { + next(); + } while (this.ord < ord); + } + + private BytesRef getTermFromIndex(long index) throws IOException { + assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + final long start = indexAddresses.get(index); + term.length = (int) (indexAddresses.get(index + 1) - start); + indexBytes.seek(start); + indexBytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekTermsIndex(BytesRef text) throws 
IOException { + long lo = 0L; + long hi = (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + while (lo <= hi) { + final long mid = (lo + hi) >>> 1; + getTermFromIndex(mid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + lo = mid + 1; + } else { + hi = mid - 1; + } + } + + assert hi < 0 || getTermFromIndex(hi).compareTo(text) <= 0; + assert hi == ((entry.termsDictSize - 1) >>> entry.termsDictIndexShift) || getTermFromIndex(hi + 1).compareTo(text) > 0; + + return hi; + } + + private BytesRef getFirstTermFromBlock(long block) throws IOException { + assert block >= 0 && block <= (entry.termsDictSize - 1) >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(block); + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekBlock(BytesRef text) throws IOException { + long index = seekTermsIndex(text); + if (index == -1L) { + return -1L; + } + + long ordLo = index << entry.termsDictIndexShift; + long ordHi = Math.min(entry.termsDictSize, ordLo + (1L << entry.termsDictIndexShift)) - 1L; + + long blockLo = ordLo >>> entry.termsDictBlockShift; + long blockHi = ordHi >>> entry.termsDictBlockShift; + + while (blockLo <= blockHi) { + final long blockMid = (blockLo + blockHi) >>> 1; + getFirstTermFromBlock(blockMid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + blockLo = blockMid + 1; + } else { + blockHi = blockMid - 1; + } + } + + assert blockHi < 0 || getFirstTermFromBlock(blockHi).compareTo(text) <= 0; + assert blockHi == ((entry.termsDictSize - 1) >>> entry.termsDictBlockShift) + || getFirstTermFromBlock(blockHi + 1).compareTo(text) > 0; + + return blockHi; + } + + @Override + public SeekStatus seekCeil(BytesRef text) throws IOException { + final long block = seekBlock(text); + if (block == -1) { + // before the first term + seekExact(0L); + return SeekStatus.NOT_FOUND; + } + final long blockAddress = 
blockAddresses.get(block); + this.ord = block << entry.termsDictBlockShift; + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + while (true) { + int cmp = term.compareTo(text); + if (cmp == 0) { + return SeekStatus.FOUND; + } else if (cmp > 0) { + return SeekStatus.NOT_FOUND; + } + if (next() == null) { + return SeekStatus.END; + } + } + } + + @Override + public BytesRef term() throws IOException { + return term; + } + + @Override + public long ord() throws IOException { + return ord; + } + + @Override + public long totalTermFreq() throws IOException { + return -1L; + } + + @Override + public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public ImpactsEnum impacts(int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int docFreq() throws IOException { + throw new UnsupportedOperationException(); + } + } + + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + SortedNumericEntry entry = sortedNumerics.get(field.name); + if (entry.numValues == entry.numDocsWithField) { + return DocValues.singleton(getNumeric(entry)); + } + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + final LongValues values = getNumericValues(entry); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new SortedNumericDocValues() { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) 
{ + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextValue() throws IOException { + return values.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new SortedNumericDocValues() { + + boolean set; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + @Override + public long nextValue() throws IOException { + set(); + return values.get(start++); + } + + @Override + public int docValueCount() { + set(); + return count; + } + + private void set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + } + } + }; + } + } + + @Override + public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { + SortedSetEntry entry = sortedSets.get(field.name); + if (entry.singleValueEntry != null) { + return DocValues.singleton(getSorted(entry.singleValueEntry)); + } + + final RandomAccessInput slice = data.randomAccessSlice(entry.ordsOffset, 
entry.ordsLength); + final LongValues ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedSetDocValues(entry, data) { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextOrd() throws IOException { + if (start == end) { + return NO_MORE_ORDS; + } + return ords.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedSetDocValues(entry, data) { + + boolean set; + long start; + long end = 0; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public 
boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + private boolean set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + return true; + } + return false; + } + + @Override + public long nextOrd() throws IOException { + if (set()) { + return ords.get(start++); + } else if (start == end) { + return NO_MORE_ORDS; + } else { + return ords.get(start++); + } + } + + @Override + public int docValueCount() { + set(); + return count; + } + }; + } + } + + @Override + public void checkIntegrity() throws IOException { + CodecUtil.checksumEntireFile(data); + } + + @Override + public DocValuesSkipper getSkipper(FieldInfo field) { + return null; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 6e5205d664f2d..0215e9f7ca4ab 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -6,5 +6,6 @@ # org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene62.Lucene62Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60Codec diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat index 2d46b4bca3d0c..8d24d86982da8 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat +++ 
b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat @@ -14,3 +14,4 @@ # limitations under the License. org.elasticsearch.xpack.lucene.bwc.codecs.lucene54.Lucene54DocValuesFormat +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java index f62ab9fbc4fee..0b72b96b446d4 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java @@ -18,7 +18,7 @@ public class OldCodecsAvailableTests extends ESTestCase { * to the next major Lucene version. */ @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) - @AwaitsFix(bugUrl = "muted until we add bwc codecs as part of lucene 10 upgrade") + @AwaitsFix(bugUrl = "muted until we add bwc codecs to support 7.x indices in Elasticsearch 9.0") public void testLuceneBWCCodecsAvailable() { assertEquals("Add Lucene BWC codecs for Elasticsearch version 7", 8, Version.CURRENT.major); } diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java index 304f7b0c934fb..59f5e5de1eff7 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java @@ -48,7 +48,9 @@ import 
org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.test.ESTestCase; @@ -187,7 +189,11 @@ public void assertTerms(Terms leftTerms, Terms rightTerms, boolean deep) throws int numIntersections = atLeast(3); for (int i = 0; i < numIntersections; i++) { String re = AutomatonTestUtil.randomRegexp(random()); - CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton()); + Automaton determinized = Operations.determinize( + new RegExp(re, RegExp.NONE).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); + CompiledAutomaton automaton = new CompiledAutomaton(determinized); if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) { // TODO: test start term too TermsEnum leftIntersection = leftTerms.intersect(automaton, null); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java index c819dca3ec6ff..1a2aca0d63bde 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java @@ -10,12 +10,12 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; import 
org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.GraalVMThreadsFilter; @ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) -public class Lucene54DocValuesFormatTests extends BaseDocValuesFormatTestCase { +public class Lucene54DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene54DocValuesFormat()); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java new file mode 100644 index 0000000000000..ce645feb854d1 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.elasticsearch.test.GraalVMThreadsFilter; + +@ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) +public class Lucene70DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { + + private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene70DocValuesFormat()); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java index 48673d2002170..f447f67b4cdd2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java @@ -179,7 +179,7 @@ private void searchProfilingEvents( .setQuery(request.getQuery()) .setTrackTotalHits(true) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); EventsIndex resampledIndex = mediumDownsampled.getResampledIndex(request.getSampleSize(), sampleCount); log.debug( "User requested [{}] samples, [{}] samples matched in [{}]. 
Picking [{}]", @@ -220,7 +220,7 @@ private void searchGenericEvents( .setPreference(String.valueOf(request.hashCode())) .setQuery(request.getQuery()) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); int requestedSampleCount = request.getSampleSize(); // random sampler aggregation does not support sampling rates between 0.5 and 1.0 -> clamp to 1.0 if (sampleCount <= requestedSampleCount * 2L) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index 9dd46e778fb9a..dbb4cf4dc6856 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -180,7 +180,7 @@ private void execute(ClusterState state, ActionListener { - boolean hasData = searchResponse.getHits().getTotalHits().value > 0; + boolean hasData = searchResponse.getHits().getTotalHits().value() > 0; listener.onResponse( new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated, anyPre891Data, hasData) ); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java index 8eac03d36371e..e4f5810ac89d3 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import 
org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -51,8 +50,7 @@ public char escape() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java index 528872ca9b4cf..41ae97ec5e4fd 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java index fd6bd177e4c60..6703f1aeacbb5 100644 --- 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -39,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java index b501967524a6b..29c471296b5d1 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java @@ -218,7 +218,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -355,7 +355,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + 
assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -483,7 +483,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java index 7269d9c3e5e7f..ed26aa50ffa62 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java @@ -217,7 +217,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -356,7 +356,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -486,7 +486,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, 
response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index edd5e557aadf0..c5978219d94d3 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -198,8 +198,8 @@ public void testRRFPagination() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, lessThanOrEqualTo(size)); for (int k = 0; k < Math.min(size, resp.getHits().getHits().length); k++) { assertThat(resp.getHits().getAt(k).getId(), equalTo(expectedDocIds.get(k + fDocs_to_fetch))); @@ -249,8 +249,8 @@ public void testRRFWithAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); 
assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); @@ -308,8 +308,8 @@ public void testRRFWithCollapse() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); @@ -366,8 +366,8 @@ public void testRRFRetrieverWithCollapseAndAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); @@ -441,8 +441,8 @@ public void testMultipleRRFRetrievers() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), 
equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_7")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_6")); @@ -493,8 +493,8 @@ public void testRRFExplainWithNamedRetrievers() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); @@ -564,8 +564,8 @@ public void testRRFExplainWithAnotherNestedRRF() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); @@ -733,14 +733,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws ); assertResponse( client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(4L)) + searchResponse -> 
assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) ); assertThat(numAsyncCalls.get(), equalTo(2)); // check that we use the rewritten vector to build the explain query assertResponse( client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf).explain(true)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) ); assertThat(numAsyncCalls.get(), equalTo(4)); } diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java index 69c61fe3bca1f..b1358f11bf633 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java @@ -167,8 +167,8 @@ public void testRRFRetrieverWithNestedQuery() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(3L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(3L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat((double) resp.getHits().getAt(0).getScore(), closeTo(0.1742, 1e-4)); assertThat( diff --git a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java 
b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java index 2a17a4a1152cf..8df4e3a8dbea5 100644 --- a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java +++ b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java @@ -120,7 +120,7 @@ private void assertPinnedPromotions(PinnedQueryBuilder pqb, LinkedHashSet { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, lessThanOrEqualTo((long) numRelevantDocs + pins.size())); // Check pins are sorted by increasing score, (unlike organic, there are no duplicate scores) @@ -193,7 +193,7 @@ public void testExhaustiveScoring() throws Exception { private void assertExhaustiveScoring(PinnedQueryBuilder pqb) { assertResponse(prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH), response -> { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, equalTo(2L)); }); } diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java index d9e65c385c610..2370a3dee6d03 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java @@ -79,12 +79,10 @@ public Query rewrite(IndexSearcher searcher) throws IOException { */ protected static class CappedBulkScorer extends BulkScorer { final BulkScorer bulkScorer; - final Weight weight; 
final float maxScore; - public CappedBulkScorer(BulkScorer bulkScorer, Weight weight, float maxScore) { + public CappedBulkScorer(BulkScorer bulkScorer, float maxScore) { this.bulkScorer = bulkScorer; - this.weight = weight; this.maxScore = maxScore; } @@ -125,15 +123,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final Weight innerWeight = searcher.createWeight(query, scoreMode, boost); if (scoreMode.needsScores()) { return new CappedScoreWeight(this, innerWeight, maxScore) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - final BulkScorer innerScorer = innerWeight.bulkScorer(context); - if (innerScorer == null) { - return null; - } - return new CappedBulkScorer(innerScorer, this, maxScore); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); @@ -152,7 +141,13 @@ public Scorer get(long leadCost) throws IOException { return innerScorer; } } - return new CappedScorer(innerWeight, innerScorer, maxScore); + return new CappedScorer(innerScorer, maxScore); + } + + @Override + public BulkScorer bulkScorer() throws IOException { + final BulkScorer innerScorer = innerScorerSupplier.bulkScorer(); + return new CappedBulkScorer(innerScorer, maxScore); } @Override @@ -166,15 +161,6 @@ public long cost() { public Matches matches(LeafReaderContext context, int doc) throws IOException { return innerWeight.matches(context, doc); } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } }; } else { return innerWeight; diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java 
b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java index 6ad3b9ce4ef85..ccc90e8f671a6 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java @@ -11,6 +11,7 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -36,8 +37,22 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new CappedScorer(this, innerWeight.scorer(context), maxScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); + if (innerScorerSupplier == null) { + return null; + } + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new CappedScorer(innerScorerSupplier.get(leadCost), maxScore); + } + + @Override + public long cost() { + return innerScorerSupplier.cost(); + } + }; } @Override diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java index 57b2b62b77f6d..67813588ba3be 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java @@ -9,15 +9,14 @@ import org.apache.lucene.search.FilterScorer; import 
org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; import java.io.IOException; public class CappedScorer extends FilterScorer { private final float maxScore; - public CappedScorer(Weight weight, Scorer delegate, float maxScore) { - super(delegate, weight); + public CappedScorer(Scorer delegate, float maxScore) { + super(delegate); this.maxScore = maxScore; } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index faf41e7e655a8..eab73fbe5ad04 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -305,7 +305,7 @@ public void testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQuerying assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards)); assertThat(newSearchResponse.getFailedShards(), equalTo(0)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo((long) numDocsWithinRange)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsWithinRange)); }); // test with SearchShardsAPI @@ -655,7 +655,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() assertThat(searchResponse.getFailedShards(), equalTo(indexOutsideSearchRangeShardCount)); assertThat(searchResponse.getSkippedShards(), equalTo(searchableSnapshotShardCount)); assertThat(searchResponse.getTotalShards(), equalTo(totalShards)); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -736,7 +736,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() // a shard that's available in order to construct the search response assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards - 1)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); }); @@ -850,7 +850,7 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo SearchResponse response = client().search(request).actionGet(); logger.info( "[TEST DEBUG INFO] Search hits: {} Successful shards: {}, failed shards: {}, skipped shards: {}, total shards: {}", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), response.getSuccessfulShards(), response.getFailedShards(), response.getSkippedShards(), diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java index 7615723860cff..9888afdd16499 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java @@ -79,7 +79,7 @@ public void testSearchableSnapshotRelocationDoNotUseSnapshotBasedRecoveries() th ensureGreen(restoredIndexName); - 
assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mockLog.assertAllExpectationsMatched(); } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java index a3da932398fb1..1e76477378da2 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java @@ -66,7 +66,7 @@ public void testRepositoryUsedBySearchableSnapshotCanBeUpdatedButNotUnregistered Storage storage = randomFrom(Storage.values()); String restoredIndexName = (storage == Storage.FULL_COPY ? "fully-mounted-" : "partially-mounted-") + indexName + '-' + i; mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, Settings.EMPTY, storage); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mountedIndices[i] = restoredIndexName; } @@ -183,7 +183,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? 
equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); final String mountedAgain = randomValueOtherThan(mounted, () -> randomAlphaOfLength(10).toLowerCase(Locale.ROOT)); final SnapshotRestoreException exception = expectThrows( @@ -208,7 +208,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value()); assertAcked(indicesAdmin().prepareDelete(mountedAgain)); assertAcked(indicesAdmin().prepareDelete(mounted)); @@ -240,7 +240,7 @@ public void testDeletionOfSnapshotSettingCannotBeUpdated() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose(mounted)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 40b7e08936fa3..7eaf5d8f060c6 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -179,7 +179,7 @@ public void testCleanUpAfterIndicesAreDeleted() throws Exception { ) .setSize(0), res -> { - final long remainingEntriesInCache = res.getHits().getTotalHits().value; + final long remainingEntriesInCache = res.getHits().getTotalHits().value(); if (indicesToDelete.contains(mountedIndex)) { assertThat(remainingEntriesInCache, equalTo(0L)); } else if (snapshotId.equals(SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings))) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index a21e3e6beabce..21e67212f1f51 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -549,7 +549,7 @@ public void run() { try (listeners) { executeSearch(new SearchRequest().source(getSearchSourceBuilder().trackTotalHits(true)), (searchResponse, refs) -> { assert total.get() == 0L; - total.set(searchResponse.getHits().getTotalHits().value); + total.set(searchResponse.getHits().getTotalHits().value()); handleSearchResponse(searchResponse, refs); }); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index da08c6b38819b..a7fb5571995b3 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -173,9 +174,10 @@ long getCacheSize(ShardId shardId, SnapshotId snapshotId, Predicate predic final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final String cacheFileId = getValue(document, CACHE_ID_FIELD); if (predicate.test(snapshotCacheDir.resolve(cacheFileId))) { long size = buildCacheFileRanges(document).stream().mapToLong(ByteRange::length).sum(); @@ -423,9 +425,10 @@ static Map loadDocuments(Path directoryPath) throws IOExceptio for (LeafReaderContext leafReaderContext : indexReader.leaves()) { final LeafReader leafReader = leafReaderContext.reader(); final Bits liveDocs = leafReader.getLiveDocs(); + final StoredFields storedFields = leafReader.storedFields(); for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { - final Document document = leafReader.document(i); + final Document document = storedFields.document(i); 
logger.trace("loading document [{}]", document); documents.put(getValue(document, CACHE_ID_FIELD), document); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java index 81cf205c13dd2..4711043fff281 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.ReadAdvice; import org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.ByteRange; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; @@ -35,7 +36,7 @@ public class CachedBlobContainerIndexInput extends MetadataCachingIndexInput { * a complete part of the {@link #fileInfo} at once in the cache and should not be * used for anything else than what the {@link #prefetchPart(int, Supplier)} method does. 
*/ - public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(); + public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(IOContext.Context.DEFAULT, null, null, ReadAdvice.NORMAL); private static final Logger logger = LogManager.getLogger(CachedBlobContainerIndexInput.class); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java index 41121453e41a4..4ee2bf7e65633 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java @@ -348,8 +348,8 @@ public static Tuple randomChecksumBytes(byte[] bytes) throws IOE * uses a different buffer size for them. 
*/ public static IOContext randomIOContext() { - final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READ, IOContext.READONCE); - assert ioContext.context != IOContext.Context.MERGE; + final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READONCE); + assert ioContext.context() != IOContext.Context.MERGE; return ioContext; } } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java index c97d6cb4cab08..eab6f1a629f36 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java @@ -179,7 +179,7 @@ public void testSupportsNoOpCommits() throws IOException { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) { @@ -226,7 +226,7 @@ public void testSupportsDeletes() throws IOException { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - 
assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } assertEquals(1, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size()); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index e65c4a60f89d5..98df96eca7772 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -401,9 +401,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { false, // no prewarming in this test because we want to ensure that files are accessed on purpose (directory, snapshotDirectory) -> { for (String fileName : randomSubsetOf(Arrays.asList(snapshotDirectory.listAll()))) { - final long checksum; - try (IndexInput input = directory.openInput(fileName, Store.READONCE_CHECKSUM)) { - checksum = CodecUtil.checksumEntireFile(input); + final long expectedChecksum; + try (IndexInput input = directory.openInput(fileName, IOContext.READONCE)) { + expectedChecksum = CodecUtil.checksumEntireFile(input); } final long snapshotChecksum; @@ -418,9 +418,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { } assertThat( - "Expected checksum [" + checksum + "] but got [" + snapshotChecksum + ']', + "Expected checksum [" + expectedChecksum + "] but got [" + snapshotChecksum + ']', snapshotChecksum, - equalTo(checksum) + equalTo(expectedChecksum) ); assertThat( "File [" + fileName + "] should have been read from 
heap", diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java index 6ffa09dc1f265..6d9110b564862 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java @@ -224,7 +224,7 @@ protected void assertSearchResponseContainsEmptyResult(Response response) { assertOK(response); SearchResponse searchResponse = SearchResponseUtils.responseAsSearchResponse(response); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java index 767452e6fcae7..4b994ce82d92f 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java @@ -282,7 +282,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr searchResponse = SearchResponseUtils.parseSearchResponse(parser); } try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java index d5e77c1694640..1602a097b1b08 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java @@ -362,7 +362,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr assertOK(response); final SearchResponse searchResponse = SearchResponseUtils.parseSearchResponse(responseAsParser(response)); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java index 8b18359fb8310..1345e275fab17 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java @@ -119,7 +119,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 69331fa448113..4cbd1cab21af9 100644 --- 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -419,7 +419,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java index 505b82b39b960..53c622898476a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java @@ -230,7 +230,7 @@ public void testAnonymousUserFromQueryClusterWorks() throws Exception { Arrays.stream(searchResponse5.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toList()), containsInAnyOrder(".security-7") ); - assertThat(searchResponse5.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(searchResponse5.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); } finally { searchResponse5.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java index 3871029b3b44b..6fa3ef1b4ef63 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java @@ -134,7 +134,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly final Request scrollRequest = new Request("GET", "/_search/scroll"); final String scrollId; try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics")); documentFieldValues.add(searchResponse.getHits().getHits()[0].getSourceAsMap().get("name")); scrollId = searchResponse.getScrollId(); @@ -153,7 +153,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly responseAsParser(performRequestWithRemoteMetricUser(scrollRequest)) ); try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat( Arrays.stream(scrollResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics") diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java index b1a76a4559812..9a1d653132d2d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java @@ -81,7 +81,7 @@ public void testDateMathExpressionsCanBeAuthorized() throws Exception { assertResponse( client.prepareMultiSearch().add(client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()).request()), - multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)) + multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)) ); UpdateResponse updateResponse = client.prepareUpdate(expression, response.getId()) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index b0572b265a45b..a5f827c2a4b53 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -437,7 +437,7 @@ private void assertSearchResponse(SearchRequestBuilder requestBuilder, Set assertResponse(prepareSearch("alias" + role), searchResponse2 -> { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(searchResponse2.getHits().getTotalHits().value())); for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index c0866fa7ea694..87ca7d279c709 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -474,13 +474,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -495,13 +495,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); @@ -522,7 +522,7 @@ public void testMSearch() throws Exception { ), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(2L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -531,7 +531,7 @@ public void testMSearch() throws Exception { assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(2L)); + 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -898,7 +898,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field1") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field1")); @@ -914,7 +914,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field2") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field2")); @@ -929,7 +929,7 @@ public void testKnnSearch() throws Exception { .setQuery(query) .setSize(10), response -> { - assertEquals(10, response.getHits().getTotalHits().value); + assertEquals(10, response.getHits().getTotalHits().value()); assertEquals(10, response.getHits().getHits().length); } ); @@ -1265,7 +1265,7 @@ public void testScroll() throws Exception { .get(); do { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1325,7 +1325,7 @@ public void testReaderId() throws Exception { 
.setQuery(termQuery("field1", "value1")) .get(); assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java index 34eecd57b53d5..01020a428c318 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java @@ -208,7 +208,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -231,7 +231,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), 
equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -254,7 +254,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index bffa53b1f4da6..66c8c0a5b1b52 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -956,10 +956,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } @@ -975,10 +975,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); } @@ -993,11 +993,11 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1013,9 +1013,9 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); } ); @@ -1029,12 +1029,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1051,12 +1051,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1073,12 +1073,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1095,11 +1095,11 @@ public void testMSearchApi() throws Exception { 
.add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1132,7 +1132,7 @@ public void testScroll() throws Exception { .get(); do { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1191,7 +1191,7 @@ public void testPointInTimeId() throws Exception { .setQuery(constantScoreQuery(termQuery("field1", "value1"))) .setFetchSource(true), response -> { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); 
assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1281,7 +1281,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); } else { user2SearchResponse.decRef(); @@ -1289,7 +1289,7 @@ public void testScrollWithQueryCache() { user2SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) ).prepareSearchScroll(user2SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); if (randomBoolean()) { // maybe reuse the scroll even if empty @@ -1309,7 +1309,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1319,7 +1319,7 @@ public void testScrollWithQueryCache() { user1SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) 
).prepareSearchScroll(user1SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); if (scrolledDocsUser1 < numDocs) { assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); @@ -2042,7 +2042,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -2061,7 +2061,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index d4375d15e6a6d..7d99d5817bdc0 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -103,20 +103,20 @@ public void testSearchAndMSearch() throws Exception { indexRandom(true, prepareIndex(index).setSource(field, "bar")); 
assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), response -> { - final long hits = response.getHits().getTotalHits().value; + final long hits = response.getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); assertResponse( client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), - response2 -> assertEquals(response2.getHits().getTotalHits().value, hits) + response2 -> assertEquals(response2.getHits().getTotalHits().value(), hits) ); final long multiHits; MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) .get(); try { - multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); } finally { multiSearchResponse.decRef(); @@ -125,7 +125,7 @@ public void testSearchAndMSearch() throws Exception { singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); try { - assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); + assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), multiHits); } finally { multiSearchResponse.decRef(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index af54f71779f08..6f8ea0f103a56 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -312,7 +312,7 @@ public void testMultiNamesWorkCorrectly() { assertResponse( userAClient.prepareSearch("alias1").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); final ElasticsearchSecurityException e1 = expectThrows( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 78146e58e91e2..e178f4bf3eb6c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -343,7 +343,7 @@ private void testAddUserAndRoleThenAuth(String username, String roleName) { String token = basicAuthHeaderValue(username, new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); assertClusterHealthOnlyAuthorizesWhenAnonymousRoleActive(token); @@ -366,7 +366,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - 
searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); preparePutUser("joe", "s3krit-password2", hasher, SecuritySettingsSource.TEST_ROLE).get(); @@ -382,7 +382,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { token = basicAuthHeaderValue("joe", new SecureString("s3krit-password2")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); } @@ -403,7 +403,7 @@ public void testCreateDeleteAuthenticate() { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); DeleteUserResponse response = new DeleteUserRequestBuilder(client()).username("joe").get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index f34983f7f125c..0acc281dd8440 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -213,7 +213,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - 
assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -231,7 +231,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -249,7 +249,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -267,7 +267,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, 
greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -317,7 +317,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); } @@ -336,7 +336,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java index eb7c5e5276c15..a4cadeb953e14 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java @@ -48,13 +48,13 @@ public void testScrollIsPerUser() throws Exception { indexRandom(true, docs); assertResponse(prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1), response -> { - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); if (randomBoolean()) { assertResponse( client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)), response2 -> { - assertEquals(numDocs, response2.getHits().getTotalHits().value); + assertEquals(numDocs, response2.getHits().getTotalHits().value()); assertEquals(1, response2.getHits().getHits().length); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index 4b8fbfd41acdf..437fb76351176 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -856,7 +856,7 @@ private SuggestProfilesResponse.ProfileHit[] doSuggest(Set dataKeys, Str final SuggestProfilesRequest suggestProfilesRequest = new SuggestProfilesRequest(dataKeys, name, 10, hint); final SuggestProfilesResponse suggestProfilesResponse = client().execute(SuggestProfilesAction.INSTANCE, suggestProfilesRequest) .actionGet(); - assertThat(suggestProfilesResponse.getTotalHits().relation, is(TotalHits.Relation.EQUAL_TO)); + assertThat(suggestProfilesResponse.getTotalHits().relation(), is(TotalHits.Relation.EQUAL_TO)); return suggestProfilesResponse.getProfileHits(); 
} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index c4cf3127b897c..03558e72fdca3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -2254,7 +2254,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No api keys found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryApiKeysResult.EMPTY); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index d866bd2a9d229..74a9aa7291ba4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -179,7 +179,7 @@ public void queryUsers(SearchRequest searchRequest, ActionListener { - final long total = searchResponse.getHits().getTotalHits().value; + final long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No users found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryUserResults.EMPTY); @@ -214,7 +214,7 @@ void getUserCount(final ActionListener listener) { .setSize(0) .setTrackTotalHits(true) .request(), - listener.safeMap(response -> response.getHits().getTotalHits().value), + listener.safeMap(response -> 
response.getHits().getTotalHits().value()), client::search ) ); @@ -706,7 +706,7 @@ void getAllReservedUserInfo(ActionListener> listen @Override public void onResponse(SearchResponse searchResponse) { Map userInfos = new HashMap<>(); - assert searchResponse.getHits().getTotalHits().value <= 10 + assert searchResponse.getHits().getTotalHits().value() <= 10 : "there are more than 10 reserved users we need to change this to retrieve them all!"; for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map sourceMap = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 1c773a6e3963f..fa6187798da25 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -85,6 +85,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.NamedClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import org.elasticsearch.xpack.core.sql.SqlAsyncActionNames; import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; @@ -550,7 +551,7 @@ public void validateIndexPermissionsAreSubset( Automaton existingPermissions = permissionMap.computeIfAbsent(entry.getKey(), role::allowedActionsMatcher); for (String alias : entry.getValue()) { Automaton newNamePermissions = permissionMap.computeIfAbsent(alias, role::allowedActionsMatcher); - if (Operations.subsetOf(newNamePermissions, existingPermissions) == false) { + if (Automatons.subsetOf(newNamePermissions, existingPermissions) == 
false) { listener.onResponse(AuthorizationResult.deny()); return; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java index 40cb3ea4d9864..8ff535f3f6231 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; @@ -21,6 +20,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import java.time.ZoneOffset; @@ -195,7 +195,7 @@ private void logDeprecatedPermission(RoleDescriptor roleDescriptor) { index.getName(), i -> IndexPrivilege.get(indexPrivileges).getAutomaton() ); - if (false == Operations.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { + if (false == Automatons.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { inferiorIndexNames.add(index.getName()); } } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 7c242fb07b681..4ae17a679d205 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -279,7 +279,7 @@ public void queryRoleDescriptors(SearchSourceBuilder searchSourceBuilder, Action TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No roles found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryRoleResult.EMPTY); @@ -731,28 +731,28 @@ public void onResponse(MultiSearchResponse items) { if (responses[0].isFailure()) { usageStats.put("size", 0); } else { - usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value); + usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value()); } if (responses[1].isFailure()) { usageStats.put("fls", false); } else { - usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[2].isFailure()) { usageStats.put("dls", false); } else { - usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[3].isFailure()) { usageStats.put("remote_indices", 0); } else { - usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value()); } if (responses[4].isFailure()) { usageStats.put("remote_cluster", 0); } else { - 
usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value()); } delegate.onResponse(usageStats); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index b347ceb833f64..b347c278aae08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -414,19 +414,19 @@ public void usageStats(ActionListener> listener) { logger.debug("error on counting total profiles", items[0].getFailure()); usage.put("total", 0L); } else { - usage.put("total", items[0].getResponse().getHits().getTotalHits().value); + usage.put("total", items[0].getResponse().getHits().getTotalHits().value()); } if (items[1].isFailure()) { logger.debug("error on counting enabled profiles", items[0].getFailure()); usage.put("enabled", 0L); } else { - usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value); + usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value()); } if (items[2].isFailure()) { logger.debug("error on counting recent profiles", items[0].getFailure()); usage.put("recent", 0L); } else { - usage.put("recent", items[2].getResponse().getHits().getTotalHits().value); + usage.put("recent", items[2].getResponse().getHits().getTotalHits().value()); } listener.onResponse(usage); }, listener::onFailure) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index 5ec76a8dc3d01..5cd8cba763d3d 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -77,8 +77,8 @@ public void migrate(SecurityIndexManager indexManager, Client client, ActionList client.search(countRequest, ActionListener.wrap(response -> { // If there are no roles, skip migration - if (response.getHits().getTotalHits().value > 0) { - logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); + if (response.getHits().getTotalHits().value() > 0) { + logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value() + "] roles"); updateRolesByQuery(indexManager, client, filterQuery, listener); } else { listener.onResponse(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 0a2c40d2a257a..a4d9dacd1a63d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.automaton.MinimizationOperations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -137,8 +137,8 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("idx3")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); - 
assertTrue(Operations.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); descriptor = roles.get("role1.ab"); assertNotNull(descriptor); @@ -181,9 +181,9 @@ public void testParseFile() throws Exception { assertThat(group.indices()[0], equalTo("/.*_.*/")); assertThat(group.privilege(), notNullValue()); assertTrue( - Operations.sameLanguage( + AutomatonTestUtil.sameLanguage( group.privilege().getAutomaton(), - MinimizationOperations.minimize( + Operations.determinize( Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ) @@ -236,7 +236,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("field_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); @@ -258,7 +258,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertFalse(group.getFieldPermissions().hasFieldLevelSecurity()); 
assertThat(group.getQuery(), notNullValue()); @@ -279,7 +279,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_fields_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index d42d45e430627..e5171a7c51650 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -281,7 +281,7 @@ public void testRetentionWhileSnapshotInProgress() throws Exception { completedSnapshotName, Strings.arrayToCommaDelimitedString(resp.getHits().getHits()) ); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); } ); }); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index df8dc54bb7490..405a9926e2e5f 100644 --- 
a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -1595,7 +1595,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { int docIdToMatch = randomIntBetween(0, docCount - 1); assertResponse(searchRequestBuilder.setQuery(QueryBuilders.termQuery("field", docIdToMatch)), searchResponse -> { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = searchResponse.getHits().getAt(0); Map source = searchHit.getSourceAsMap(); assertThat(source, is(notNullValue())); @@ -1613,7 +1613,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { private void assertSearchResponseContainsAllIndexedDocs(SearchResponse searchResponse, long docCount) { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docCount)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docCount)); for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { SearchHit searchHit = searchResponse.getHits().getAt(i); Map source = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java index 3c64d140e2b56..e7b9156d5fb66 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java +++ 
b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java @@ -129,7 +129,7 @@ public void testKnownIssueWithCellLeftOfDatelineTouchingPolygonOnRightOfDateline client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), innerResponse -> assertThat( "Bucket " + bucket.getKeyAsString(), - innerResponse.getHits().getTotalHits().value, + innerResponse.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); @@ -320,7 +320,7 @@ private void assertQuery(List buckets, BiFunction assertThat( "Expected hits at precision " + precision + " for H3 cell " + bucket.getKeyAsString(), - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java index b4a3a07502abf..b4d7a472591bd 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java @@ -175,7 +175,7 @@ public void testStorePolygonDateLine() throws Exception { indexRandom(true, prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); assertNoFailuresAndResponse(client().prepareSearch("test").setFetchSource(false).addStoredField("shape"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.field("shape").getValue(), instanceOf(BytesRef.class)); BytesRef bytesRef = searchHit.field("shape").getValue(); diff --git 
a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java index 1c013aba52261..4f23b6de4c37d 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java @@ -247,7 +247,7 @@ public void testFieldAlias() { assertResponse( client().prepareSearch(INDEX).setQuery(new ShapeQueryBuilder("alias", queryGeometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertTrue(response.getHits().getTotalHits().value > 0); + assertTrue(response.getHits().getTotalHits().value() > 0); } ); } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java index 1ac6bf3b6fd31..e26066cd89c50 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java @@ -112,7 +112,7 @@ public void testIndexPointsFilterRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -123,7 +123,7 @@ 
public void testIndexPointsFilterRectangle() { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -138,7 +138,7 @@ public void testIndexPointsCircle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -154,7 +154,7 @@ public void testIndexPointsPolygon() { .setQuery(new ShapeQueryBuilder(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(searchHits.getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); } @@ -175,7 +175,7 @@ public void testIndexPointsMultiPolygon() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, mp).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); 
assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("3"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("3"))); @@ -191,7 +191,7 @@ public void testIndexPointsRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -232,7 +232,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java index 66f5597be543e..2713afc149e05 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java @@ -230,8 +230,8 @@ public void testGeoShapeQueryAcrossDateline() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + 
assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value(), equalTo(1L)); } } } @@ -261,8 +261,8 @@ public void testShapeQuery() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(centerPointQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(centerPointQuery, 1).totalHits.value(), equalTo(1L)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 06293df4f4559..411a4cda868f0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -223,7 +223,7 @@ protected static void logSearchResponse(SearchResponse response, Logger logger) } var totalHits = response.getHits().getTotalHits(); - var hits = totalHits != null ? "hits " + totalHits.relation + " " + totalHits.value + ", " : ""; + var hits = totalHits != null ? "hits " + totalHits.relation() + " " + totalHits.value() + ", " : ""; logger.trace( "Got search response [{}{} aggregations: [{}], {} failed shards, {} skipped shards, " + "{} successful shards, {} total shards, took {}, timed out [{}]]", @@ -548,7 +548,7 @@ protected List initBucketExtractors(SearchResponse response) { List exts = new ArrayList<>(refs.size()); TotalHits totalHits = response.getHits().getTotalHits(); - ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? -1L : totalHits.value); + ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? 
-1L : totalHits.value()); for (QueryContainer.FieldInfo ref : refs) { exts.add(createExtractor(ref.extraction(), totalCount)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java index 78976ea7e83c0..cf52a5f5d7126 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java @@ -76,7 +76,7 @@ public Object extract(Bucket bucket) { throw new SqlIllegalArgumentException("Cannot find an aggregation named {}", name); } - if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value == 0) { + if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value() == 0) { return null; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index 8ee23e38f9ffe..0ba29fef8e06d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -123,7 +123,7 @@ protected void masterOperation( Arrays.toString(transformCountSuccess.getShardFailures()) ); } - long totalTransforms = transformCountSuccess.getHits().getTotalHits().value; + long totalTransforms = transformCountSuccess.getHits().getTotalHits().value(); if (totalTransforms == 0) { var usage = new TransformFeatureSetUsage(transformsCountByState, Collections.emptyMap(), new TransformIndexerStats()); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java index f49d5fc96f3ab..cd06a4cadaa37 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java @@ -84,7 +84,7 @@ public void sourceHasChanged(TransformCheckpoint lastCheckpoint, ActionListener< client, TransportSearchAction.TYPE, searchRequest, - ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value > 0L), listener::onFailure) + ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value() > 0L), listener::onFailure) ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index ffc4b48f9cc30..9d5175922c892 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -509,7 +509,7 @@ public void expandTransformIds( final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, allowNoMatch); executeAsyncWithOrigin(request, foundConfigsListener.delegateFailureAndWrap((l, searchResponse) -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); // important: preserve order Set ids = Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); Set configs = 
Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); @@ -589,7 +589,7 @@ public void resetTransform(String transformId, ActionListener listener) .trackTotalHitsUpTo(1) ); executeAsyncWithOrigin(TransportSearchAction.TYPE, searchRequest, deleteListener.delegateFailureAndWrap((l, searchResponse) -> { - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { listener.onFailure( new ResourceNotFoundException(TransformMessages.getMessage(TransformMessages.REST_UNKNOWN_TRANSFORM, transformId)) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java index 23bab56de5ec9..2de810b2b902d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java @@ -207,7 +207,7 @@ private SearchRequest buildSearchRequestForValidation(String logId, SourceConfig @Override public void getInitialProgressFromResponse(SearchResponse response, ActionListener progressListener) { - progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value, 0L, 0L)); + progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value(), 0L, 0L)); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index 684e3a085405d..68b31d4f466b6 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.transforms.pivot; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.geo.GeoPoint; @@ -560,7 +560,7 @@ static class GeoTileFieldCollector implements FieldCollector { @Override public int getMaxPageSize() { // this collector is limited by indices.query.bool.max_clause_count, default 1024 - return BooleanQuery.getMaxClauseCount(); + return IndexSearcher.getMaxClauseCount(); } @Override diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java index 706337768a299..5f7c6490e51f1 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java @@ -50,7 +50,7 @@ public void testCanUseAnyConcreteIndexName() throws Exception { assertBusy(() -> { assertResponse( prepareSearch(watchResultsIndex).setTrackTotalHits(true), - searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value, greaterThan(0)) + searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value(), greaterThan(0)) ); }); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java index f1ad29607b5b8..7fa5365afa0ab 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java @@ -120,7 +120,7 @@ private void assertTotalHistoryEntries(String id, long expectedCount) throws Exc assertResponse( prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSize(0) .setSource(new SearchSourceBuilder().query(QueryBuilders.boolQuery().must(termQuery("watch_id", id)))), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(oneOf(expectedCount, expectedCount + 1))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(oneOf(expectedCount, expectedCount + 1))) ); }); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 60867ba5d4410..4068c534013b9 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -122,7 +122,7 @@ public void testActionConditionWithHardFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); 
searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -176,7 +176,7 @@ public void testActionConditionWithFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -236,7 +236,7 @@ public void testActionCondition() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 5b7ea39079f28..dac87eaa6f034 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -105,7 +105,7 @@ public void testEmailFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); InternalAggregations aggs = 
response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 97347de1ea23e..ffac36846414e 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -102,7 +102,7 @@ public void testHttpFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 7dde279fb90db..8dec5287ae607 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -54,7 +54,7 @@ public void testIndexActionFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff 
--git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 567d4acfa45e5..b268caa45f471 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -72,7 +72,7 @@ public void testHttpFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 5dc537fc259d9..5eaf27e7b2670 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -377,7 +377,7 @@ protected void assertWatchWithMinimumPerformedActionsCount( lastResponse.set(searchResponse); assertThat( "could not find executed watch record for watch " + watchName, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(minimumExpectedWatchActionsWithActionPerformed) ); if (assertConditionMet) { @@ -396,7 +396,7 @@ protected void 
assertWatchWithMinimumPerformedActionsCount( } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -452,7 +452,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long searchResponse -> { lastResponse.set(searchResponse); assertThat( - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(expectedWatchActionsWithNoActionNeeded) ); } @@ -461,7 +461,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -497,7 +497,7 @@ protected void assertWatchWithMinimumActionsCount(final String watchName, final searchResponse -> { assertThat( "could not find executed watch record", - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(recordCount) ); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java index 99640d1ebc3ea..03f1e6cb57eb8 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java @@ -296,8 +296,8 @@ private void assertSingleExecutionAndCompleteWatchHistory(final long numberOfWat AtomicLong successfulWatchExecutions = new AtomicLong(); refresh(); assertResponse(prepareSearch("output"), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(numberOfWatches))); - successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value); + assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(numberOfWatches))); + successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value()); }); // the watch history should contain entries for each triggered watch, which a few have been marked as not executed @@ -378,7 +378,7 @@ public void testWatchRecordSavedTwice() throws Exception { // the actual documents are in the output index refresh(); assertResponse(prepareSearch(HistoryStoreField.DATA_STREAM).setSize(numRecords), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo((long) numRecords)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo((long) numRecords)); for (int i = 0; i < numRecords; i++) { assertThat(searchResponse.getHits().getAt(i).getSourceAsMap().get("state"), is(ExecutionState.EXECUTED.id())); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java 
index e5f4091ca89eb..f3648580691cb 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -55,7 +55,7 @@ public void testHistoryOnRejection() throws Exception { assertBusy(() -> { flushAndRefresh(".watcher-history-*"); assertResponse(prepareSearch(".watcher-history-*"), searchResponse -> { - assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(2L)); + assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(2L)); assertThat( "Did not find watcher history for rejected watch", Arrays.stream(searchResponse.getHits().getHits()) diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index 7ff293ed9b150..fbb1996a4cf42 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -69,7 +69,7 @@ public void testThatLoadingWithNonExistingIndexWorks() throws Exception { assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); assertResponse( prepareSearch(".watcher-history*").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(1L))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(1L))) ); }, 30, TimeUnit.SECONDS); } diff --git 
a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 4298f641cbdd2..e12805f3ace09 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -79,7 +79,7 @@ public void testWatchMetadata() throws Exception { } assertNotNull(searchResponse); try { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 92ac91a63e097..2ec6541275d04 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -139,13 +139,13 @@ public void testScriptTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - 
assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); @@ -184,12 +184,12 @@ public void testSearchTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); } @@ -223,13 +223,13 @@ public void testChainTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 5389f34212270..0ea9b432d3b0f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -340,7 +340,7 @@ private Collection loadWatches(ClusterState clusterState) { throw new ElasticsearchException("Partial response while loading watches"); } - if (response.getHits().getTotalHits().value == 0) { + if (response.getHits().getTotalHits().value() == 0) { return Collections.emptyList(); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java index 9a165112c41d1..327d345af864e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java @@ -42,7 +42,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.service.ClusterService; @@ -440,7 +439,7 @@ static CharacterRunAutomaton createAutomaton(List whiteListedHosts) { } Automaton whiteListAutomaton = Regex.simpleMatchToAutomaton(whiteListedHosts.toArray(Strings.EMPTY_ARRAY)); - whiteListAutomaton = MinimizationOperations.minimize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + whiteListAutomaton = Operations.determinize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); return new 
CharacterRunAutomaton(whiteListAutomaton); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 6775dca424bf1..dfa0c47493ed7 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -156,7 +156,7 @@ public Collection findTriggeredWatches(Collection watches SearchResponse response = null; try { response = client.search(searchRequest).actionGet(defaultSearchTimeout); - logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value); + logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value()); while (response.getHits().getHits().length != 0) { for (SearchHit hit : response.getHits()) { Wid wid = new Wid(hit.getId()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 9d6186e9c1c48..e6bd1b0efb95d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -91,7 +91,7 @@ SearchInput.Result doExecute(WatchExecutionContext ctx, WatcherSearchTemplateReq try { if (logger.isDebugEnabled()) { - logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value); + logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value()); } final Payload payload; diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java index 97ae29a26e68c..358a839e60ea5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java @@ -98,11 +98,11 @@ SearchRequest createSearchRequest(QueryWatchesAction.Request request) { } void transformResponse(SearchResponse searchResponse, ActionListener listener) { - assert searchResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert searchResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; List items = Arrays.stream(searchResponse.getHits().getHits()) .map(this::transformSearchHit) .toList(); - listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value, items)); + listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value(), items)); } QueryWatchesAction.Response.Item transformSearchHit(SearchHit searchHit) { diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java index 608e5f1972373..191775f46cd72 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import 
org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -69,44 +70,56 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ByteArrayStreamInput bytes = new ByteArrayStreamInput(); final BinaryDocValues values = DocValues.getBinary(context.reader(), field); - Scorer approxScorer = approxWeight.scorer(context); - if (approxScorer == null) { + ScorerSupplier approxScorerSupplier = approxWeight.scorerSupplier(context); + if (approxScorerSupplier == null) { // No matches to be had return null; } - DocIdSetIterator approxDisi = approxScorer.iterator(); - TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + + return new ScorerSupplier() { @Override - public boolean matches() throws IOException { - if (values.advanceExact(approxDisi.docID()) == false) { - // Can happen when approxQuery resolves to some form of MatchAllDocs expression - return false; - } - BytesRef arrayOfValues = values.binaryValue(); - bytes.reset(arrayOfValues.bytes); - bytes.setPosition(arrayOfValues.offset); - - int size = bytes.readVInt(); - for (int i = 0; i < size; i++) { - int valLength = bytes.readVInt(); - if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { - return true; + public Scorer get(long leadCost) throws IOException { + Scorer approxScorer = approxScorerSupplier.get(leadCost); + DocIdSetIterator approxDisi = approxScorer.iterator(); + TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + @Override + public boolean matches() throws IOException { + if (values.advanceExact(approxDisi.docID()) == false) { + // Can happen when approxQuery resolves to some form of MatchAllDocs expression + 
return false; + } + BytesRef arrayOfValues = values.binaryValue(); + bytes.reset(arrayOfValues.bytes); + bytes.setPosition(arrayOfValues.offset); + + int size = bytes.readVInt(); + for (int i = 0; i < size; i++) { + int valLength = bytes.readVInt(); + if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { + return true; + } + bytes.skipBytes(valLength); + } + return false; + } + + @Override + public float matchCost() { + // TODO: how can we compute this? + return 1000f; } - bytes.skipBytes(valLength); - } - return false; + }; + return new ConstantScoreScorer(score(), scoreMode, twoPhase); } @Override - public float matchCost() { - // TODO: how can we compute this? - return 1000f; + public long cost() { + return approxScorerSupplier.cost(); } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); } @Override diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 7784e7ffdda12..f3b01bb898126 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchParseException; @@ -349,7 +348,7 @@ public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean } Automaton automaton = caseInsensitive ? 
AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term(name(), wildcardPattern)) - : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern)); + : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (clauseCount > 0) { // We can accelerate execution with the ngram query BooleanQuery approxQuery = rewritten.build(); @@ -379,7 +378,6 @@ public Query regexpQuery( RegExp regExp = new RegExp(value, syntaxFlags, matchFlags); Automaton a = regExp.toAutomaton(); a = Operations.determinize(a, maxDeterminizedStates); - a = MinimizationOperations.minimize(a, maxDeterminizedStates); if (Operations.isTotal(a)) { // Will match all return existsQuery(context); } @@ -390,7 +388,7 @@ public Query regexpQuery( Query approxNgramQuery = rewriteBoolToNgramQuery(approxBooleanQuery); RegExp regex = new RegExp(value, syntaxFlags, matchFlags); - Automaton automaton = regex.toAutomaton(maxDeterminizedStates); + Automaton automaton = Operations.determinize(regex.toAutomaton(), maxDeterminizedStates); // We can accelerate execution with the ngram query return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); @@ -550,9 +548,9 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); int clauseCount = 0; for (BooleanClause clause : bq) { - Query q = rewriteBoolToNgramQuery(clause.getQuery()); + Query q = rewriteBoolToNgramQuery(clause.query()); if (q != null) { - if (clause.getOccur().equals(Occur.FILTER)) { + if (clause.occur().equals(Occur.FILTER)) { // Can't drop "should" clauses because it can elevate a sibling optional item // to mandatory (shoulds with 1 clause) causing false negatives // Dropping MUSTs increase false positives which are OK because are verified anyway. 
@@ -561,7 +559,7 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { break; } } - rewritten.add(q, clause.getOccur()); + rewritten.add(q, clause.occur()); } } return rewritten.build(); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 4b9ccff6f526c..a1a01ebdcc590 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -41,6 +41,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -182,7 +183,7 @@ public void testTooBigKeywordField() throws IOException { Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("*a*", null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(1L)); reader.close(); dir.close(); @@ -229,12 +230,12 @@ public void testTooBigQueryField() throws IOException { String queryString = randomABString((IndexSearcher.getMaxClauseCount() * 2) + 1); Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(queryString, null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); // Test regexp query 
wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); reader.close(); dir.close(); @@ -271,13 +272,13 @@ public void testTermAndPrefixQueryIgnoreWildcardSyntax() throws IOException { private void expectTermMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().termQuery(term, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } private void expectPrefixMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().prefixQuery(term, null, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } public void testSearchResultsVersusKeywordField() throws IOException { @@ -390,8 +391,8 @@ public void testSearchResultsVersusKeywordField() throws IOException { TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, values.size() + 1, Sort.RELEVANCE); assertThat( keywordFieldQuery + "\n" + wildcardFieldQuery, - wildcardFieldTopDocs.totalHits.value, - equalTo(kwTopDocs.totalHits.value) + wildcardFieldTopDocs.totalHits.value(), + equalTo(kwTopDocs.totalHits.value()) ); HashSet expectedDocs = new HashSet<>(); @@ -497,7 +498,7 @@ public void testRangeQueryVersusKeywordField() throws IOException { TopDocs kwTopDocs = searcher.search(keywordFieldQuery, 10, Sort.RELEVANCE); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.RELEVANCE); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(kwTopDocs.totalHits.value)); + 
assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(kwTopDocs.totalHits.value())); HashSet expectedDocs = new HashSet<>(); for (ScoreDoc topDoc : kwTopDocs.scoreDocs) { @@ -642,7 +643,7 @@ public void testWildcardAcceleration() throws IOException, ParseException { public void testQueryCachingEquality() throws IOException, ParseException { String pattern = "A*b*B?a"; // Case sensitivity matters when it comes to caching - Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); Automaton caseInSensitiveAutomaton = AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term("field", pattern)); BinaryDvConfirmedAutomatonQuery csQ = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), @@ -660,7 +661,10 @@ public void testQueryCachingEquality() throws IOException, ParseException { assertNotEquals(csQ.hashCode(), ciQ.hashCode()); // Same query should be equal - Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton( + new Term("field", pattern), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); BinaryDvConfirmedAutomatonQuery csQ2 = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), "field", @@ -880,11 +884,11 @@ private Query rewriteFiltersToMustsForComparisonPurposes(Query q) { if (q instanceof BooleanQuery bq) { BooleanQuery.Builder result = new BooleanQuery.Builder(); for (BooleanClause cq : bq.clauses()) { - Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.getQuery()); - if (cq.getOccur() == Occur.FILTER) { + Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.query()); + if (cq.occur() == Occur.FILTER) { result.add(rewritten, Occur.MUST); } else { - result.add(rewritten, cq.getOccur()); + result.add(rewritten, cq.occur()); } } return result.build(); 
@@ -1013,8 +1017,9 @@ protected String convertToRandomRegex(String randomValue) { } // Assert our randomly generated regex actually matches the provided raw input. - RegExp regex = new RegExp(result.toString()); - Automaton automaton = regex.toAutomaton(); + int includeDeprecatedComplement = RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT; + RegExp regex = new RegExp(result.toString(), includeDeprecatedComplement); + Automaton automaton = Operations.determinize(regex.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton); BytesRef br = new BytesRef(randomValue); assertTrue( diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index d61c143098fcb..f502683e42eb2 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -377,7 +377,7 @@ private void assertDocs( try { logger.info(searchResponse); // check hit count - assertEquals(numDocs, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocs, searchResponse.getHits().getTotalHits().value()); // check that _index is properly set assertTrue(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).allMatch(index::equals)); // check that all _ids are there @@ -404,7 +404,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals(id, searchResponse.getHits().getHits()[0].getId()); assertEquals(sourceForDoc(num), searchResponse.getHits().getHits()[0].getSourceAsString()); } finally { @@ -456,7 +456,7 @@ private void assertDocs( ); try { 
logger.info(searchResponse); - assertEquals(typeCount, searchResponse.getHits().getTotalHits().value); + assertEquals(typeCount, searchResponse.getHits().getTotalHits().value()); for (SearchHit hit : searchResponse.getHits().getHits()) { DocumentField typeField = hit.field("_type"); assertNotNull(typeField); @@ -482,7 +482,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(0, searchResponse.getHits().getTotalHits().value); + assertEquals(0, searchResponse.getHits().getTotalHits().value()); assertEquals(numberOfShards, searchResponse.getSuccessfulShards()); // When all shards are skipped, at least one of them is queried in order to provide a proper search response. assertEquals(numberOfShards - 1, searchResponse.getSkippedShards()); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index dddba9b7b0fba..02dc679152bf4 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -445,7 +445,7 @@ private List getAllTokenIds() throws IOException { final SearchHits searchHits = response.getHits(); assertThat( "Search request used with size parameter that was too small to fetch all tokens.", - searchHits.getTotalHits().value, + searchHits.getTotalHits().value(), lessThanOrEqualTo(searchSize) ); final List tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> { From 0a399b572d6927241c0ffecf3a1a3a1b6849c7fb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 21 Oct 2024 22:53:49 +1100 Subject: [PATCH 35/67] Mute org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT 
#115213 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b7323bfc1de18..3b6a0d8cdae3d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -361,6 +361,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115129 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT issue: https://github.com/elastic/elasticsearch/issues/115135 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/115213 # Examples: # From 6a73f89d31722ede8fa4d71f68fe8187e00b28c7 Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Mon, 21 Oct 2024 13:38:46 +0100 Subject: [PATCH 36/67] Use `equals()` to compare `String`-s over operators in `hasRemoteClusters()` (#115154) --- .../java/org/elasticsearch/action/search/SearchResponse.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 83ee6c216ad49..041b3ae73c1ee 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -787,7 +787,8 @@ public boolean hasClusterObjects() { * This will be false for local-cluster (non-CCS) only searches. 
*/ public boolean hasRemoteClusters() { - return total > 1 || clusterInfo.keySet().stream().anyMatch(alias -> alias != RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + return total > 1 + || clusterInfo.keySet().stream().anyMatch(alias -> alias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false); } } From 6580bfbd2d514e65b837c8c69b00b615890b9954 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 21 Oct 2024 15:12:11 +0200 Subject: [PATCH 37/67] Reduce repeated warning logs from AdaptiveAllocationsScalerService (#115089) --- .../AdaptiveAllocationsScalerService.java | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 9624d619ff20a..770e890512935 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -41,6 +41,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; @@ -205,13 +206,11 @@ Collection observeDouble(Function scalers; private final Map lastScaleUpTimesMillis; - private volatile Scheduler.Cancellable cancellable; private final AtomicBoolean busy; - private final long scaleToZeroAfterNoRequestsSeconds; - private final Set deploymentIdsWithInFlightScaleFromZeroRequests = new ConcurrentSkipListSet<>(); + private final Map lastWarningMessages = new ConcurrentHashMap<>(); public 
AdaptiveAllocationsScalerService( ThreadPool threadPool, @@ -475,7 +474,8 @@ private ActionListener updateAssigm int numberOfAllocations ) { return ActionListener.wrap(updateResponse -> { - logger.debug("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, numberOfAllocations); + lastWarningMessages.remove(deploymentId); + logger.info("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, numberOfAllocations); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) .execute( () -> inferenceAuditor.info( @@ -484,20 +484,24 @@ private ActionListener updateAssigm ) ); }, e -> { - logger.atLevel(Level.WARN) + Level level = e.getMessage().equals(lastWarningMessages.get(deploymentId)) ? Level.DEBUG : Level.WARN; + lastWarningMessages.put(deploymentId, e.getMessage()); + logger.atLevel(level) .withThrowable(e) .log("adaptive allocations scaler: scaling [{}] to [{}] allocations failed.", deploymentId, numberOfAllocations); - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute( - () -> inferenceAuditor.warning( - deploymentId, - Strings.format( - "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", + if (level == Level.WARN) { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> inferenceAuditor.warning( deploymentId, - numberOfAllocations + Strings.format( + "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", + deploymentId, + numberOfAllocations + ) ) - ) - ); + ); + } }); } } From e0e34c3f5411daaadcbd470651a492d1d7cfc923 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 22 Oct 2024 00:13:10 +1100 Subject: [PATCH 38/67] Mute org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests testEvaluateInManyThreads {TestCase=} #115227 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 
3b6a0d8cdae3d..96b7d05f662ab 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -363,6 +363,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115135 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/115213 +- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests + method: testEvaluateInManyThreads {TestCase=} + issue: https://github.com/elastic/elasticsearch/issues/115227 # Examples: # From 2ff0afadae2881524c117bfb84a68e95fc0bc09a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 22 Oct 2024 00:13:25 +1100 Subject: [PATCH 39/67] Mute org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests testEvaluateInManyThreads {TestCase=} #115228 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 96b7d05f662ab..70c5f3ced5273 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -366,6 +366,9 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests method: testEvaluateInManyThreads {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/115227 +- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests + method: testEvaluateInManyThreads {TestCase=} + issue: https://github.com/elastic/elasticsearch/issues/115228 # Examples: # From 183ad88104ec05ebdc0765a3794b69d2449753d4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 22 Oct 2024 00:27:36 +1100 Subject: [PATCH 40/67] Mute org.elasticsearch.xpack.test.rest.XPackRestIT test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} #115231 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 70c5f3ced5273..1dda90369ce76 100644 --- a/muted-tests.yml +++ 
b/muted-tests.yml @@ -369,6 +369,9 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests method: testEvaluateInManyThreads {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/115228 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} + issue: https://github.com/elastic/elasticsearch/issues/115231 # Examples: # From fd43adc78aff661b9a0b2435587f6d4b03009aa8 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Mon, 21 Oct 2024 15:43:47 +0200 Subject: [PATCH 41/67] ES|QL Add initial support for semantic_text field type (#113920) * Add initial support for semantic_text field type * Update docs/changelog/113920.yaml * More tests and fixes * Use mock inference service * Fix tests * Spotless * Fix mixed-cluster and multi-clusters tests * sort * Attempt another fix for bwc tests * Spotless * Fix merge * Attempt another fix * Don't load the inference-service-test plugin for mixed versions/clusters * Add more tests, address review comments * trivial * revert * post-merge fix block loader * post-merge fix compile * add mixed version testing * whitespace * fix MultiClusterSpecIT * add more fields to mapping * Revert mixed version testing * whitespace --------- Co-authored-by: ChrisHegarty Co-authored-by: Elastic Machine --- docs/changelog/113920.yaml | 5 + .../esql/core/plugin/EsqlCorePlugin.java | 1 + .../xpack/esql/core/type/DataType.java | 12 +- .../esql/qa/mixed/MixedClusterEsqlSpecIT.java | 5 + .../xpack/esql/ccq/MultiClusterSpecIT.java | 5 + .../esql/qa/server/multi-node/build.gradle | 1 + .../xpack/esql/qa/multi_node/EsqlSpecIT.java | 2 +- .../esql/qa/server/single-node/build.gradle | 1 + .../xpack/esql/qa/single_node/EsqlSpecIT.java | 2 +- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 20 +- .../elasticsearch/xpack/esql/CsvAssert.java | 6 +- .../xpack/esql/CsvTestUtils.java | 1 + .../xpack/esql/CsvTestsDataLoader.java | 132 +++++++++++-- 
.../xpack/esql/EsqlTestUtils.java | 2 +- .../main/resources/mapping-semantic_text.json | 73 ++++++++ .../src/main/resources/semantic_text.csv | 4 + .../src/main/resources/semantic_text.csv-spec | 175 ++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 6 +- .../xpack/esql/action/PositionToXContent.java | 2 +- .../xpack/esql/action/ResponseValueUtils.java | 2 +- .../esql/planner/LocalExecutionPlanner.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 2 +- .../esql/action/EsqlQueryResponseTests.java | 4 +- .../scalar/conditional/CaseTests.java | 2 +- .../mapper/SemanticTextFieldMapper.java | 9 + .../test/esql/40_unsupported_types.yml | 49 +++++ 26 files changed, 490 insertions(+), 35 deletions(-) create mode 100644 docs/changelog/113920.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec diff --git a/docs/changelog/113920.yaml b/docs/changelog/113920.yaml new file mode 100644 index 0000000000000..4699ae6d7dd65 --- /dev/null +++ b/docs/changelog/113920.yaml @@ -0,0 +1,5 @@ +pr: 113920 +summary: Add initial support for `semantic_text` field type +area: Search +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java index 639d8ed68d0a3..d84a471815a9a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java @@ -14,4 +14,5 @@ public class EsqlCorePlugin extends Plugin implements ExtensiblePlugin { public static final FeatureFlag DATE_NANOS_FEATURE_FLAG = new 
FeatureFlag("esql_date_nanos"); + public static final FeatureFlag SEMANTIC_TEXT_FEATURE_FLAG = new FeatureFlag("esql_semantic_text"); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 12699ca3ee720..5041c96128a1e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -194,7 +194,14 @@ public enum DataType { * inside alongside time-series aggregations. These fields are not parsable from the * mapping and should be hidden from users. */ - PARTIAL_AGG(builder().esType("partial_agg").unknownSize()); + PARTIAL_AGG(builder().esType("partial_agg").unknownSize()), + /** + * String fields that are split into chunks, where each chunk has attached embeddings + * used for semantic search. Generally ESQL only sees {@code semantic_text} fields when + * loaded from the index and ESQL will load these fields as strings without their attached + * chunks or embeddings. + */ + SEMANTIC_TEXT(builder().esType("semantic_text").unknownSize()); /** * Types that are actively being built. These types are not returned @@ -203,7 +210,8 @@ public enum DataType { * check that sending them to a function produces a sane error message. 
*/ public static final Map UNDER_CONSTRUCTION = Map.ofEntries( - Map.entry(DATE_NANOS, EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG) + Map.entry(DATE_NANOS, EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG) ); private final String typeName; diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index d0d6d5fa49c42..0e23b29172c32 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -86,4 +86,9 @@ protected boolean supportsAsync() { protected boolean enableRoundingDoubleValuesOnAsserting() { return true; } + + @Override + protected boolean supportsInferenceTestService() { + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 3e799730f7269..8446ac63f43a1 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -261,4 +261,9 @@ static boolean hasIndexMetadata(String query) { protected boolean enableRoundingDoubleValuesOnAsserting() { return true; } + + @Override + protected boolean supportsInferenceTestService() { + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/multi-node/build.gradle 
b/x-pack/plugin/esql/qa/server/multi-node/build.gradle index 9f8ca78aba81e..2dcc001c4e159 100644 --- a/x-pack/plugin/esql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-node/build.gradle @@ -11,6 +11,7 @@ dependencies { clusterPlugins project(':plugins:mapper-size') clusterPlugins project(':plugins:mapper-murmur3') + clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') } GradleUtils.extendSourceSet(project, "javaRestTest", "yamlRestTest") diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java index bda10709ed947..64c113345bd53 100644 --- a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java @@ -14,7 +14,7 @@ public class EsqlSpecIT extends EsqlSpecTestCase { @ClassRule - public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> {}); + public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test")); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/esql/qa/server/single-node/build.gradle b/x-pack/plugin/esql/qa/server/single-node/build.gradle index ab8e3d4b32d9a..a37db5dc245e0 100644 --- a/x-pack/plugin/esql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/single-node/build.gradle @@ -22,6 +22,7 @@ dependencies { clusterPlugins project(':plugins:mapper-size') clusterPlugins project(':plugins:mapper-murmur3') + clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') } restResources { diff --git 
a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index 676fffd553ca8..368eebe808eee 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -18,7 +18,7 @@ @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EsqlSpecIT extends EsqlSpecTestCase { @ClassRule - public static ElasticsearchCluster cluster = Clusters.testCluster(); + public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test")); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 319e67512c7ac..57f58fc448822 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -65,7 +65,10 @@ import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues; -import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.availableDatasetsForEs; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.clusterHasInferenceEndpoint; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.createInferenceEndpoint; +import 
static org.elasticsearch.xpack.esql.CsvTestsDataLoader.deleteInferenceEndpoint; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs; import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; @@ -129,7 +132,11 @@ protected EsqlSpecTestCase( @Before public void setup() throws IOException { - if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { + if (supportsInferenceTestService() && clusterHasInferenceEndpoint(client()) == false) { + createInferenceEndpoint(client()); + } + + if (indexExists(availableDatasetsForEs(client()).iterator().next().indexName()) == false) { loadDataSetIntoEs(client()); } } @@ -148,6 +155,8 @@ public static void wipeTestData() throws IOException { throw e; } } + + deleteInferenceEndpoint(client()); } public boolean logResults() { @@ -164,6 +173,9 @@ public final void test() throws Throwable { } protected void shouldSkipTest(String testName) throws IOException { + if (testCase.requiredCapabilities.contains("semantic_text_type")) { + assumeTrue("Inference test service needs to be supported for semantic_text", supportsInferenceTestService()); + } checkCapabilities(adminClient(), testFeatureService, testName, testCase); assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, instructions, Version.CURRENT)); } @@ -207,6 +219,10 @@ protected static void checkCapabilities(RestClient client, TestFeatureService te } } + protected boolean supportsInferenceTestService() { + return true; + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 36d785c24ab23..1a2aa122c85ca 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -134,7 +134,11 @@ private static void assertMetadata( || expectedType == UNSIGNED_LONG)) { continue; } - if (blockType == Type.KEYWORD && (expectedType == Type.IP || expectedType == Type.VERSION || expectedType == Type.TEXT)) { + if (blockType == Type.KEYWORD + && (expectedType == Type.IP + || expectedType == Type.VERSION + || expectedType == Type.TEXT + || expectedType == Type.SEMANTIC_TEXT)) { // Type.asType translates all bytes references into keywords continue; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index eaec6811fbc24..bd8bd0f688837 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -447,6 +447,7 @@ public enum Type { SCALED_FLOAT(s -> s == null ? 
null : scaledFloat(s, "100"), Double.class), KEYWORD(Object::toString, BytesRef.class), TEXT(Object::toString, BytesRef.class), + SEMANTIC_TEXT(Object::toString, BytesRef.class), IP( StringUtils::parseIP, (l, r) -> l instanceof String maybeIP diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index d63585086f1cd..cf9d66727a900 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -19,6 +19,7 @@ import org.apache.logging.log4j.core.config.plugins.util.PluginManager; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.Strings; @@ -36,9 +37,11 @@ import java.net.URI; import java.net.URL; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.CsvTestUtils.COMMA_ESCAPING_REGEX; @@ -81,6 +84,7 @@ public class CsvTestsDataLoader { private static final TestsDataset K8S = new TestsDataset("k8s", "k8s-mappings.json", "k8s.csv").withSetting("k8s-settings.json"); private static final TestsDataset ADDRESSES = new TestsDataset("addresses"); private static final TestsDataset BOOKS = new TestsDataset("books"); + private static final TestsDataset SEMANTIC_TEXT = new TestsDataset("semantic_text").withInferenceEndpoint(true); public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), @@ 
-112,7 +116,8 @@ public class CsvTestsDataLoader { Map.entry(K8S.indexName, K8S), Map.entry(DISTANCES.indexName, DISTANCES), Map.entry(ADDRESSES.indexName, ADDRESSES), - Map.entry(BOOKS.indexName, BOOKS) + Map.entry(BOOKS.indexName, BOOKS), + Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); @@ -219,8 +224,13 @@ public static void main(String[] args) throws IOException { } } - private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { - loadDataSetIntoEs(client, LogManager.getLogger(CsvTestsDataLoader.class), indexCreator); + public static Set availableDatasetsForEs(RestClient client) throws IOException { + boolean inferenceEnabled = clusterHasInferenceEndpoint(client); + + return CSV_DATASET_MAP.values() + .stream() + .filter(d -> d.requiresInferenceEndpoint == false || inferenceEnabled) + .collect(Collectors.toCollection(HashSet::new)); } public static void loadDataSetIntoEs(RestClient client) throws IOException { @@ -229,22 +239,61 @@ public static void loadDataSetIntoEs(RestClient client) throws IOException { }); } - public static void loadDataSetIntoEs(RestClient client, Logger logger) throws IOException { - loadDataSetIntoEs(client, logger, (restClient, indexName, indexMapping, indexSettings) -> { - ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null); - }); - } + private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { + Logger logger = LogManager.getLogger(CsvTestsDataLoader.class); - private static void loadDataSetIntoEs(RestClient client, Logger logger, IndexCreator indexCreator) throws IOException { - for (var dataset : CSV_DATASET_MAP.values()) { + Set loadedDatasets = new HashSet<>(); + for (var dataset : availableDatasetsForEs(client)) { load(client, dataset, logger, indexCreator); + 
loadedDatasets.add(dataset.indexName); } - forceMerge(client, CSV_DATASET_MAP.keySet(), logger); + forceMerge(client, loadedDatasets, logger); for (var policy : ENRICH_POLICIES) { loadEnrichPolicy(client, policy.policyName, policy.policyFileName, logger); } } + /** The semantic_text mapping type require an inference endpoint that needs to be setup before creating the index. */ + public static void createInferenceEndpoint(RestClient client) throws IOException { + Request request = new Request("PUT", "_inference/sparse_embedding/test_sparse_inference"); + request.setJsonEntity(""" + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + """); + client.performRequest(request); + } + + public static void deleteInferenceEndpoint(RestClient client) throws IOException { + try { + client.performRequest(new Request("DELETE", "_inference/test_sparse_inference")); + } catch (ResponseException e) { + // 404 here means the endpoint was not created + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + } + } + + public static boolean clusterHasInferenceEndpoint(RestClient client) throws IOException { + Request request = new Request("GET", "_inference/sparse_embedding/test_sparse_inference"); + try { + client.performRequest(request); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() == 404) { + return false; + } + throw e; + } + return true; + } + private static void loadEnrichPolicy(RestClient client, String policyName, String policyFileName, Logger logger) throws IOException { URL policyMapping = CsvTestsDataLoader.class.getResource("/" + policyFileName); if (policyMapping == null) { @@ -511,34 +560,79 @@ public record TestsDataset( String dataFileName, String settingFileName, boolean allowSubFields, - Map typeMapping + Map typeMapping, + boolean requiresInferenceEndpoint ) { public TestsDataset(String indexName, String mappingFileName, 
String dataFileName) { - this(indexName, mappingFileName, dataFileName, null, true, null); + this(indexName, mappingFileName, dataFileName, null, true, null, false); } public TestsDataset(String indexName) { - this(indexName, "mapping-" + indexName + ".json", indexName + ".csv", null, true, null); + this(indexName, "mapping-" + indexName + ".json", indexName + ".csv", null, true, null, false); } public TestsDataset withIndex(String indexName) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset withData(String dataFileName) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset withSetting(String settingFileName) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset noSubfields() { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, false, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + false, + typeMapping, + requiresInferenceEndpoint + ); } public TestsDataset withTypeMapping(Map typeMapping) { - return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + return new TestsDataset( + indexName, + mappingFileName, + dataFileName, + settingFileName, + allowSubFields, + typeMapping, + 
requiresInferenceEndpoint + ); + } + + public TestsDataset withInferenceEndpoint(boolean needsInference) { + return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping, needsInference); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index f5bcb37c63e84..d71c66b4c467f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -648,7 +648,7 @@ public static Literal randomLiteral(DataType type) { case KEYWORD -> new BytesRef(randomAlphaOfLength(5)); case IP -> new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))); case TIME_DURATION -> Duration.ofMillis(randomLongBetween(-604800000L, 604800000L)); // plus/minus 7 days - case TEXT -> new BytesRef(randomAlphaOfLength(50)); + case TEXT, SEMANTIC_TEXT -> new BytesRef(randomAlphaOfLength(50)); case VERSION -> randomVersion().toBytesRef(); case GEO_POINT -> GEO.asWkb(GeometryTestUtils.randomPoint()); case CARTESIAN_POINT -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json new file mode 100644 index 0000000000000..b110d6fd4cdd5 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json @@ -0,0 +1,73 @@ +{ + "properties": { + "semantic_text_field": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_bool": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_cartesian_point": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_cartesian_shape": { 
+ "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_datetime": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_double": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_geopoint": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_geoshape": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_integer": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_ip": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_long": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_unsigned_long": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_version": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_multi_value": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "st_unicode": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" + }, + "host" : { + "type" : "keyword" + }, + "description" : { + "type" : "text" + }, + "value": { + "type": "long" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv new file mode 100644 index 0000000000000..c6de9a208e9a7 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv @@ -0,0 +1,4 @@ +_id:keyword,semantic_text_field:semantic_text,st_bool:semantic_text,st_cartesian_point:semantic_text,st_cartesian_shape:semantic_text,st_datetime:semantic_text,st_double:semantic_text,st_geopoint:semantic_text,st_geoshape:semantic_text,st_integer:semantic_text,st_ip:semantic_text,st_long:semantic_text,st_unsigned_long:semantic_text,st_version:semantic_text,st_multi_value:semantic_text,st_unicode:semantic_text,host:keyword,description:text,value:long +1,live 
long and prosper,false,"POINT(4297.11 -1475.53)",,1953-09-02T00:00:00.000Z,5.20128E11,"POINT(42.97109630194 14.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",23,1.1.1.1,2147483648,2147483648,1.2.3,["Hello there!", "This is a random value", "for testing purposes"],你吃饭了吗,"host1","some description1",1001 +2,all we have to decide is what to do with the time that is given to us,true,"POINT(7580.93 2272.77)",,2023-09-24T15:57:00.000Z,4541.11,"POINT(37.97109630194 21.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",122,1.1.2.1,123,2147483648.2,9.0.0,["nice to meet you", "bye bye!"],["谢谢", "对不起我的中文不好"],"host2","some description2",1002 +3,be excellent to each other,,,,,,,,,,,,,,,"host3","some description3",1003 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec new file mode 100644 index 0000000000000..683bcdc3f7490 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec @@ -0,0 +1,175 @@ +simple +required_capability: semantic_text_type + +FROM semantic_text +| KEEP semantic_text_field +| sort semantic_text_field asc; + +semantic_text_field:semantic_text +all we have to decide is what to do with the time that is given to us +be excellent to each other +live long and prosper +; + +simpleWithUnicode +required_capability: semantic_text_type + +FROM semantic_text +| KEEP st_unicode +| SORT st_unicode +; + +st_unicode:semantic_text +你吃饭了吗 +["谢谢", "对不起我的中文不好"] +null +; + +mvExpand +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP _id, st_multi_value +| MV_EXPAND st_multi_value +| SORT st_multi_value +; + +_id:keyword | st_multi_value:semantic_text +1 | Hello there! +1 | This is a random value +2 | bye bye! 
+1 | for testing purposes +2 | nice to meet you +3 | null +; + +withDropAndKeep +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP _id, semantic_text_field, st_double +| DROP st_double +| SORT _id +; + +_id:keyword | semantic_text_field:semantic_text +1 | live long and prosper +2 | all we have to decide is what to do with the time that is given to us +3 | be excellent to each other +; + +rename +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| RENAME semantic_text_field AS my_field +| KEEP _id, my_field +| SORT _id +; + +_id:keyword | my_field:semantic_text +1 | live long and prosper +2 | all we have to decide is what to do with the time that is given to us +3 | be excellent to each other +; + +eval +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| EVAL my_field = semantic_text_field +| KEEP _id, my_field +| SORT _id +; + +_id:keyword | my_field:semantic_text +1 | live long and prosper +2 | all we have to decide is what to do with the time that is given to us +3 | be excellent to each other +; + +simpleStats +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| STATS COUNT(*) +; + +COUNT(*):long +3 +; + +statsWithGrouping +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| STATS COUNT(*) BY st_version +| SORT st_version +; + +COUNT(*):long | st_version:semantic_text +1 | 1.2.3 +1 | 9.0.0 +1 | null +; + +withDropKeepStatsMvExpandRenameSortLimit +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP _id, semantic_text_field, st_multi_value +| DROP semantic_text_field +| RENAME st_multi_value AS my_field +| MV_EXPAND my_field +| STATS COUNT(*) BY my_field +| SORT my_field +| LIMIT 3 +; + +COUNT(*):long | my_field:semantic_text +1 | Hello there! +1 | This is a random value +1 | bye bye! 
+; + +simpleWithLongValue +required_capability: semantic_text_type + +FROM semantic_text +| KEEP value, semantic_text_field +| SORT value +; + +value:long | semantic_text_field:semantic_text +1001 | live long and prosper +1002 | all we have to decide is what to do with the time that is given to us +1003 | be excellent to each other +; + +simpleWithText +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP description, semantic_text_field +| SORT description +; + +description:text | semantic_text_field:semantic_text +"some description1" | live long and prosper +"some description2" | all we have to decide is what to do with the time that is given to us +"some description3" | be excellent to each other +; + +simpleWithKeyword +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| KEEP host, semantic_text_field +| SORT host +; + +host:keyword | semantic_text_field:semantic_text +"host1" | live long and prosper +"host2" | all we have to decide is what to do with the time that is given to us +"host3" | be excellent to each other +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index adfba4c487618..3c39406198da3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -395,7 +395,11 @@ public enum Cap { /** * Adding stats for functions (stack telemetry) */ - FUNCTION_STATS; + FUNCTION_STATS, + /** + * Support for semantic_text field mapping + */ + SEMANTIC_TEXT_TYPE(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG); private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 0b1bafdab1a99..0def56c70dc35 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -89,7 +89,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(unsignedLongAsNumber(l)); } }; - case KEYWORD, TEXT -> new PositionToXContent(block) { + case KEYWORD, SEMANTIC_TEXT, TEXT -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 3b18bda120e2e..49fcc167dce0f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -114,7 +114,7 @@ private static Object valueAt(DataType dataType, Block block, int offset, BytesR case LONG, COUNTER_LONG -> ((LongBlock) block).getLong(offset); case INTEGER, COUNTER_INTEGER -> ((IntBlock) block).getInt(offset); case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock) block).getDouble(offset); - case KEYWORD, TEXT -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); + case KEYWORD, SEMANTIC_TEXT, TEXT -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); case IP -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); yield ipToString(val); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index b28c80211c649..dc732258d9fa5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -349,7 +349,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte elementTypes[channel] = PlannerUtils.toElementType(inverse.get(channel).type()); encoders[channel] = switch (inverse.get(channel).type()) { case IP -> TopNEncoder.IP; - case TEXT, KEYWORD -> TopNEncoder.UTF8; + case TEXT, KEYWORD, SEMANTIC_TEXT -> TopNEncoder.UTF8; case VERSION -> TopNEncoder.VERSION; case BOOLEAN, NULL, BYTE, SHORT, INTEGER, LONG, DOUBLE, FLOAT, HALF_FLOAT, DATETIME, DATE_NANOS, DATE_PERIOD, TIME_DURATION, OBJECT, SCALED_FLOAT, UNSIGNED_LONG, DOC_DATA_TYPE, TSID_DATA_TYPE -> TopNEncoder.DEFAULT_SORTABLE; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 7beed64dda8cb..7868984d6b6e2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -247,7 +247,7 @@ public static ElementType toElementType(DataType dataType, MappedFieldType.Field case INTEGER, COUNTER_INTEGER -> ElementType.INT; case DOUBLE, COUNTER_DOUBLE -> ElementType.DOUBLE; // unsupported fields are passed through as a BytesRef - case KEYWORD, TEXT, IP, SOURCE, VERSION, UNSUPPORTED -> ElementType.BYTES_REF; + case KEYWORD, TEXT, IP, SOURCE, VERSION, SEMANTIC_TEXT, UNSUPPORTED -> ElementType.BYTES_REF; case NULL -> ElementType.NULL; case BOOLEAN -> ElementType.BOOLEAN; case DOC_DATA_TYPE -> ElementType.DOC; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index b147cfde21721..27343bf7ce205 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -193,7 +193,7 @@ private Page randomPage(List columns) { case INTEGER, COUNTER_INTEGER -> ((IntBlock.Builder) builder).appendInt(randomInt()); case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock.Builder) builder).appendDouble(randomDouble()); case KEYWORD -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10))); - case TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10000))); + case TEXT, SEMANTIC_TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10000))); case IP -> ((BytesRefBlock.Builder) builder).appendBytesRef( new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))) ); @@ -866,7 +866,7 @@ static Page valuesToPage(BlockFactory blockFactory, List columns case LONG, COUNTER_LONG -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue()); case INTEGER, COUNTER_INTEGER -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue()); case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue()); - case KEYWORD, TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(value.toString())); + case KEYWORD, TEXT, SEMANTIC_TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(value.toString())); case UNSUPPORTED -> ((BytesRefBlock.Builder) builder).appendNull(); case IP -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToIP(value.toString())); case DATETIME -> { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 9d0d9c3da30a8..db3fce244c9a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -59,7 +59,7 @@ public class CaseTests extends AbstractScalarFunctionTestCase { DataType.NULL ).collect(Collectors.toList()); if (Build.current().isSnapshot()) { - t.addAll(DataType.UNDER_CONSTRUCTION.keySet()); + t.add(DataType.DATE_NANOS); } TYPES = unmodifiableList(t); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index ce0b3a099d472..fb18cfb4959c7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -23,6 +23,8 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.FieldMapper; @@ -611,6 +613,13 @@ private String generateInvalidQueryInferenceResultsMessage(StringBuilder baseMes return baseMessageBuilder.toString(); } + + @Override + public BlockLoader blockLoader(MappedFieldType.BlockLoaderContext blContext) 
{ + SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name().concat(".text"))); + var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll(), sourceMode); + } } /** diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index e100f30717aef..049895bc9f31a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -504,3 +504,52 @@ double nested declared in mapping: # The `nested` field is not visible, nor are any of it's subfields. - match: { columns: [{name: name, type: keyword}] } + +--- +semantic_text declared in mapping: + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: POST + path: /_query + parameters: [ ] + capabilities: [ semantic_text_type ] + reason: "support for semantic_text type" + - do: + indices.create: + index: test_semantic_text + body: + settings: + number_of_shards: 5 + mappings: + properties: + semantic_text_field: + type: semantic_text + inference_id: my_inference_id + - do: + bulk: + index: test_semantic_text + refresh: true + body: + - { "index": { } } + - { + "semantic_text_field": { + "text": "be excellent to each other", + "inference": { + "inference_id": "my_inference_id", + "model_settings": { + "task_type": "sparse_embedding" + }, + "chunks": [{ "text": "be excellent to each other", "embeddings": { "a": 1,"b": 2 } }] + } + } + } + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test_semantic_text' + - match: { columns: [{name: semantic_text_field, type: semantic_text}] } + - length: { values: 1 } + - match: { 
values.0: ["be excellent to each other"] } From 4f08b7e57876cf87ffccd08f3e2b459800b71cf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 21 Oct 2024 16:04:54 +0200 Subject: [PATCH 42/67] Update APM Java Agent to support JDK 23 (#115194) --- docs/changelog/115194.yaml | 7 +++++++ gradle/verification-metadata.xml | 6 +++--- modules/apm/build.gradle | 2 +- 3 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/115194.yaml diff --git a/docs/changelog/115194.yaml b/docs/changelog/115194.yaml new file mode 100644 index 0000000000000..0b201b9f89aa5 --- /dev/null +++ b/docs/changelog/115194.yaml @@ -0,0 +1,7 @@ +pr: 115194 +summary: Update APM Java Agent to support JDK 23 +area: Infra/Metrics +type: upgrade +issues: + - 115101 + - 115100 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4d9b96184d07a..e2dfa89c8f3b8 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -69,9 +69,9 @@ - - - + + + diff --git a/modules/apm/build.gradle b/modules/apm/build.gradle index 4c822e44da6f6..b510e2403e933 100644 --- a/modules/apm/build.gradle +++ b/modules/apm/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation "io.opentelemetry:opentelemetry-api:${otelVersion}" implementation "io.opentelemetry:opentelemetry-context:${otelVersion}" implementation "io.opentelemetry:opentelemetry-semconv:${otelSemconvVersion}" - runtimeOnly "co.elastic.apm:elastic-apm-agent:1.44.0" + runtimeOnly "co.elastic.apm:elastic-apm-agent:1.52.0" } tasks.named("dependencyLicenses").configure { From 1cf8d496c80afcba0d32f4b501927fecb10efc23 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 21 Oct 2024 15:09:20 +0100 Subject: [PATCH 43/67] [ML] Do not create the .inference index as a side effect of calling usage (#115023) The Inference usage API calls GET _inference/_all and because the default configs are persisted on read it causes the creation of the .inference index. 
This action is undesirable and causes test failures by leaking the system index out of the test clean up code. --- muted-tests.yml | 15 ------ .../org/elasticsearch/TransportVersions.java | 1 + .../test/rest/ESRestTestCase.java | 2 - .../action/GetInferenceModelAction.java | 34 +++++++++++- .../integration/ModelRegistryIT.java | 54 +++++++++++++++++-- .../TransportGetInferenceModelAction.java | 5 +- .../action/TransportInferenceUsageAction.java | 2 +- .../inference/registry/ModelRegistry.java | 45 ++++++++++++---- .../action/GetInferenceModelRequestTests.java | 9 +++- 9 files changed, 129 insertions(+), 38 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1dda90369ce76..1818c55f16fdf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -297,12 +297,6 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/usage/line_38} issue: https://github.com/elastic/elasticsearch/issues/113694 -- class: org.elasticsearch.xpack.eql.EqlRestIT - method: testIndexWildcardPatterns - issue: https://github.com/elastic/elasticsearch/issues/114749 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: testEnrichSpecialTypes - issue: https://github.com/elastic/elasticsearch/issues/114773 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT method: testEveryActionIsEitherOperatorOnlyOrNonOperator issue: https://github.com/elastic/elasticsearch/issues/102992 @@ -312,23 +306,14 @@ tests: - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT method: testTracingCrossCluster issue: https://github.com/elastic/elasticsearch/issues/112731 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: testImmutablePolicy - issue: https://github.com/elastic/elasticsearch/issues/114839 - class: org.elasticsearch.license.LicensingTests issue: https://github.com/elastic/elasticsearch/issues/114865 -- class: org.elasticsearch.xpack.enrich.EnrichIT - method: 
testDeleteIsCaseSensitive - issue: https://github.com/elastic/elasticsearch/issues/114840 - class: org.elasticsearch.packaging.test.EnrollmentProcessTests method: test20DockerAutoFormCluster issue: https://github.com/elastic/elasticsearch/issues/114885 - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} issue: https://github.com/elastic/elasticsearch/issues/114902 -- class: org.elasticsearch.xpack.enrich.EnrichRestIT - method: test {p0=enrich/40_synthetic_source/enrich documents over _bulk} - issue: https://github.com/elastic/elasticsearch/issues/114825 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d85990b4ede8c..cde09d33516c9 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -177,6 +177,7 @@ static TransportVersion def(int id) { public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); + public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index e5b23158d4fd4..d17016f850300 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1121,8 +1121,6 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE if (preserveSecurityIndices) { indexPatterns.add("-.security-*"); } - // always preserve inference index - indexPatterns.add("-.inference"); final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns)); deleteRequest.addParameter("expand_wildcards", "open,closed,hidden"); final Response response = adminClient().performRequest(deleteRequest); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index 5a779ada4e182..6e06133509644 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -34,19 +34,40 @@ public GetInferenceModelAction() { public static class Request extends AcknowledgedRequest { + private static boolean PERSIST_DEFAULT_CONFIGS = true; + private final String inferenceEntityId; private final TaskType taskType; + // Default endpoint configurations are persisted on first read. + // Set to false to avoid persisting on read. + // This setting only applies to GET * requests. 
It has + // no effect when getting a single model + private final boolean persistDefaultConfig; public Request(String inferenceEntityId, TaskType taskType) { super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); this.taskType = Objects.requireNonNull(taskType); + this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS; + } + + public Request(String inferenceEntityId, TaskType taskType, boolean persistDefaultConfig) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); + this.taskType = Objects.requireNonNull(taskType); + this.persistDefaultConfig = persistDefaultConfig; } public Request(StreamInput in) throws IOException { super(in); this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)) { + this.persistDefaultConfig = in.readBoolean(); + } else { + this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS; + } + } public String getInferenceEntityId() { @@ -57,11 +78,18 @@ public TaskType getTaskType() { return taskType; } + public boolean isPersistDefaultConfig() { + return persistDefaultConfig; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(inferenceEntityId); taskType.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ)) { + out.writeBoolean(this.persistDefaultConfig); + } } @Override @@ -69,12 +97,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(inferenceEntityId, request.inferenceEntityId) && taskType == request.taskType; + return Objects.equals(inferenceEntityId, request.inferenceEntityId) + && taskType == request.taskType + && 
persistDefaultConfig == request.persistDefaultConfig; } @Override public int hashCode() { - return Objects.hash(inferenceEntityId, taskType); + return Objects.hash(inferenceEntityId, taskType, persistDefaultConfig); } } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index e62cdcdc7fd2a..8713511c2f5f2 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; @@ -251,7 +252,7 @@ public void testGetAllModels() throws InterruptedException { } AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(modelCount)); var getAllModels = modelHolder.get(); @@ -333,14 +334,14 @@ public void testGetAllModels_WithDefaults() throws Exception { } AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); 
assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(totalModelCount)); var getAllModels = modelHolder.get(); assertReturnModelIsModifiable(modelHolder.get().get(0)); // same result but configs should have been persisted this time - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(totalModelCount)); @@ -387,7 +388,7 @@ public void testGetAllModels_OnlyDefaults() throws Exception { AtomicReference exceptionHolder = new AtomicReference<>(); AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getAllModels(randomBoolean(), listener), modelHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(2)); var getAllModels = modelHolder.get(); @@ -405,6 +406,44 @@ public void testGetAllModels_OnlyDefaults() throws Exception { } } + public void testGetAllModels_withDoNotPersist() throws Exception { + int defaultModelCount = 2; + var serviceName = "foo"; + var service = mock(InferenceService.class); + + var defaultConfigs = new ArrayList(); + var defaultIds = new ArrayList(); + for (int i = 0; i < defaultModelCount; i++) { + var id = "default-" + i; + var taskType = randomFrom(TaskType.values()); + defaultConfigs.add(createModel(id, taskType, serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId(id, taskType, service)); + } + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener>) invocation.getArguments()[0]; + listener.onResponse(defaultConfigs); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + + defaultIds.forEach(modelRegistry::addDefaultIds); + + AtomicReference exceptionHolder = 
new AtomicReference<>(); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getAllModels(false, listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(2)); + + expectThrows(IndexNotFoundException.class, () -> client().admin().indices().prepareGetIndex().addIndices(".inference").get()); + + // this time check the index is created + blockingCall(listener -> modelRegistry.getAllModels(true, listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(2)); + assertInferenceIndexExists(); + } + public void testGet_WithDefaults() throws InterruptedException { var serviceName = "foo"; var service = mock(InferenceService.class); @@ -513,6 +552,12 @@ public void testGetByTaskType_WithDefaults() throws Exception { assertReturnModelIsModifiable(modelHolder.get().get(0)); } + private void assertInferenceIndexExists() { + var indexResponse = client().admin().indices().prepareGetIndex().addIndices(".inference").get(); + assertNotNull(indexResponse.getSettings()); + assertNotNull(indexResponse.getMappings()); + } + @SuppressWarnings("unchecked") private void assertReturnModelIsModifiable(UnparsedModel unparsedModel) { var settings = unparsedModel.settings(); @@ -551,7 +596,6 @@ private Model buildElserModelConfig(String inferenceEntityId, TaskType taskType) ); default -> throw new IllegalArgumentException("task type " + taskType + " is not supported"); }; - } protected void blockingCall(Consumer> function, AtomicReference response, AtomicReference error) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 5ee1e40869dbc..edcec45b50a16 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -69,7 +69,7 @@ protected void doExecute( boolean inferenceEntityIdIsWildCard = Strings.isAllOrWildcard(request.getInferenceEntityId()); if (request.getTaskType() == TaskType.ANY && inferenceEntityIdIsWildCard) { - getAllModels(listener); + getAllModels(request.isPersistDefaultConfig(), listener); } else if (inferenceEntityIdIsWildCard) { getModelsByTaskType(request.getTaskType(), listener); } else { @@ -100,8 +100,9 @@ private void getSingleModel( })); } - private void getAllModels(ActionListener listener) { + private void getAllModels(boolean persistDefaultEndpoints, ActionListener listener) { modelRegistry.getAllModels( + persistDefaultEndpoints, listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java index 624afff9f5d11..7b7475efac334 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java @@ -63,7 +63,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { - GetInferenceModelAction.Request getInferenceModelAction = new GetInferenceModelAction.Request("_all", TaskType.ANY); + GetInferenceModelAction.Request getInferenceModelAction = new GetInferenceModelAction.Request("_all", TaskType.ANY, false); client.execute(GetInferenceModelAction.INSTANCE, getInferenceModelAction, 
listener.delegateFailureAndWrap((delegate, response) -> { Map stats = new TreeMap<>(); for (ModelConfigurations model : response.getEndpoints()) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 260d4e663dafd..4506a05d58054 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -69,6 +69,17 @@ import static org.elasticsearch.core.Strings.format; +/** + * Class for persisting and reading inference endpoint configurations. + * Some inference services provide default configurations, the registry is + * made aware of these at start up via {@link #addDefaultIds(InferenceService.DefaultConfigId)}. + * Only the ids and service details are registered at this point + * as the full config definition may not be known at start up. + * The full config is lazily populated on read and persisted to the + * index. This has the effect of creating the backing index on reading + * the configs. {@link #getAllModels(boolean, ActionListener)} has an option + * to not write the default configs to index on read to avoid index creation. 
+ */ public class ModelRegistry { public record ModelConfigMap(Map config, Map secrets) {} @@ -132,7 +143,7 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener lis if (searchResponse.getHits().getHits().length == 0) { var maybeDefault = idMatchedDefault(inferenceEntityId, defaultConfigIds); if (maybeDefault.isPresent()) { - getDefaultConfig(maybeDefault.get(), listener); + getDefaultConfig(true, maybeDefault.get(), listener); } else { delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); } @@ -199,7 +210,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); var defaultConfigsForTaskType = taskTypeMatchedDefaults(taskType, defaultConfigIds); - addAllDefaultConfigsIfMissing(modelConfigs, defaultConfigsForTaskType, delegate); + addAllDefaultConfigsIfMissing(true, modelConfigs, defaultConfigsForTaskType, delegate); }); QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(TASK_TYPE_FIELD, taskType.toString())); @@ -216,13 +227,20 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { + public void getAllModels(boolean persistDefaultEndpoints, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { var foundConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); - addAllDefaultConfigsIfMissing(foundConfigs, defaultConfigIds, delegate); + addAllDefaultConfigsIfMissing(persistDefaultEndpoints, foundConfigs, defaultConfigIds, delegate); }); // In theory the index should only contain model config documents @@ -241,6 +259,7 @@ public void getAllModels(ActionListener> listener) { } private void addAllDefaultConfigsIfMissing( + boolean 
persistDefaultEndpoints, List foundConfigs, List matchedDefaults, ActionListener> listener @@ -263,18 +282,26 @@ private void addAllDefaultConfigsIfMissing( ); for (var required : missing) { - getDefaultConfig(required, groupedListener); + getDefaultConfig(persistDefaultEndpoints, required, groupedListener); } } } - private void getDefaultConfig(InferenceService.DefaultConfigId defaultConfig, ActionListener listener) { + private void getDefaultConfig( + boolean persistDefaultEndpoints, + InferenceService.DefaultConfigId defaultConfig, + ActionListener listener + ) { defaultConfig.service().defaultConfigs(listener.delegateFailureAndWrap((delegate, models) -> { boolean foundModel = false; for (var m : models) { if (m.getInferenceEntityId().equals(defaultConfig.inferenceId())) { foundModel = true; - storeDefaultEndpoint(m, () -> listener.onResponse(modelToUnparsedModel(m))); + if (persistDefaultEndpoints) { + storeDefaultEndpoint(m, () -> listener.onResponse(modelToUnparsedModel(m))); + } else { + listener.onResponse(modelToUnparsedModel(m)); + } break; } } @@ -287,7 +314,7 @@ private void getDefaultConfig(InferenceService.DefaultConfigId defaultConfig, Ac })); } - public void storeDefaultEndpoint(Model preconfigured, Runnable runAfter) { + private void storeDefaultEndpoint(Model preconfigured, Runnable runAfter) { var responseListener = ActionListener.wrap(success -> { logger.debug("Added default inference endpoint [{}]", preconfigured.getInferenceEntityId()); }, exception -> { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java index 93694f167259f..314b3037fdd63 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java @@ -15,7 +15,7 @@ public class GetInferenceModelRequestTests extends AbstractWireSerializingTestCase { public static GetInferenceModelAction.Request randomTestInstance() { - return new GetInferenceModelAction.Request(randomAlphaOfLength(8), randomFrom(TaskType.values())); + return new GetInferenceModelAction.Request(randomAlphaOfLength(8), randomFrom(TaskType.values()), randomBoolean()); } @Override @@ -30,12 +30,17 @@ protected GetInferenceModelAction.Request createTestInstance() { @Override protected GetInferenceModelAction.Request mutateInstance(GetInferenceModelAction.Request instance) { - return switch (randomIntBetween(0, 1)) { + return switch (randomIntBetween(0, 2)) { case 0 -> new GetInferenceModelAction.Request(instance.getInferenceEntityId() + "foo", instance.getTaskType()); case 1 -> { var nextTaskType = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; yield new GetInferenceModelAction.Request(instance.getInferenceEntityId(), nextTaskType); } + case 2 -> new GetInferenceModelAction.Request( + instance.getInferenceEntityId(), + instance.getTaskType(), + instance.isPersistDefaultConfig() == false + ); default -> throw new UnsupportedOperationException(); }; } From b0e3b79ad27d3131478ad907819f26786818db1a Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 21 Oct 2024 16:34:04 +0200 Subject: [PATCH 44/67] ES|QL: relax tests on usage stats (#115214) --- .../yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index e1fd9b0201a35..7d1a4e123299b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -83,4 +83,3 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 117} From deef8c7a9bb65b78685d4e5e31a47b1c928924b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:41:28 +0200 Subject: [PATCH 45/67] [DOCS] Add DLS multi-match limitation (#115003) --- docs/reference/security/limitations.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/security/limitations.asciidoc b/docs/reference/security/limitations.asciidoc index 96af0e01c8075..b1bdd8cbbf5d5 100644 --- a/docs/reference/security/limitations.asciidoc +++ b/docs/reference/security/limitations.asciidoc @@ -81,12 +81,13 @@ including the following queries: * A search request cannot be profiled if document level security is enabled. * The <> does not return terms if document level security is enabled. +* The <> query does not support specifying fields using wildcards. NOTE: While document-level security prevents users from viewing restricted documents, it's still possible to write search requests that return aggregate information about the entire index. A user whose access is restricted to specific documents in an index could still learn about field names and terms that only exist in inaccessible -documents, and count how many inaccessible documents contain a given term. +documents, and count how many inaccessible documents contain a given term. 
[discrete] [[alias-limitations]] From 5e761fe4d0733c3cdc6d60c5f955dcd9086c1843 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 21 Oct 2024 17:43:45 +0300 Subject: [PATCH 46/67] Change backwards test configuration to use trial license (#115226) * Change backwards test configuration to use trial license * unmute --- muted-tests.yml | 2 -- qa/mixed-cluster/build.gradle | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1818c55f16fdf..4dc177ef001fd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -346,8 +346,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115129 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT issue: https://github.com/elastic/elasticsearch/issues/115135 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/115213 - class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests method: testEvaluateInManyThreads {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/115227 diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index a5b7ae8d703ea..23d7af7603d56 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -71,6 +71,7 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> numberOfNodes = 4 setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'xpack.security.enabled', 'false' + setting "xpack.license.self_generated.type", "trial" /* There is a chance we have more master changes than "normal", so to avoid this test from failing, we increase the threshold (as this purpose of this test isn't to test that specific indicator). 
*/ if (bwcVersion.onOrAfter(Version.fromString("8.4.0"))) { From 8c378754ab3ee25725cc08810b04237e5ee022e6 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 21 Oct 2024 16:07:06 +0100 Subject: [PATCH 47/67] Remove ChunkedToXContentHelper.array method, swap for ChunkedToXContentBuilder (#114319) --- .../cluster/metadata/IndexGraveyard.java | 6 +- .../AbstractAllocationDecision.java | 7 +- .../AllocateUnassignedDecision.java | 6 +- .../routing/allocation/MoveDecision.java | 6 +- .../xcontent/ChunkedToXContentBuilder.java | 22 ++- .../xcontent/ChunkedToXContentHelper.java | 4 - .../org/elasticsearch/health/Diagnosis.java | 58 +++----- .../health/HealthIndicatorResult.java | 45 +++--- .../elasticsearch/ingest/IngestMetadata.java | 6 +- .../PersistentTasksCustomMetadata.java | 10 +- .../org/elasticsearch/script/ScriptStats.java | 38 ++--- .../org/elasticsearch/search/SearchHits.java | 31 ++-- .../health/HealthIndicatorResultTests.java | 6 +- .../results/ChatCompletionResults.java | 4 +- ...nferenceChunkedSparseEmbeddingResults.java | 4 +- ...erenceChunkedTextEmbeddingByteResults.java | 4 +- ...renceChunkedTextEmbeddingFloatResults.java | 4 +- .../InferenceTextEmbeddingByteResults.java | 4 +- .../InferenceTextEmbeddingFloatResults.java | 4 +- .../inference/results/RankedDocsResults.java | 4 +- .../results/SparseEmbeddingResults.java | 4 +- .../compute/operator/DriverProfile.java | 39 +++-- .../action/GetFlamegraphResponse.java | 138 +++++++----------- 23 files changed, 197 insertions(+), 257 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 320be8acb0af9..62867b4260bfd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.index.Index; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.ObjectParser; @@ -128,8 +128,8 @@ public boolean containsIndex(final Index index) { } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.array(TOMBSTONES_FIELD.getPreferredName(), tombstones.iterator()); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).array(TOMBSTONES_FIELD.getPreferredName(), tombstones.iterator()); } public static IndexGraveyard fromXContent(final XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java index 7bb97faa6b2d0..827cc378ef3a9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -140,7 +141,11 @@ public static Iterator nodeDecisionsToXContentChunked(List toXContentChunked(ToXContent.Params params) { checkDecisionState(); - return Iterators.concat(Iterators.single((builder, p) -> { + return 
ChunkedToXContent.builder(params).append((builder, p) -> { builder.field("can_allocate", getAllocationDecision()); builder.field("allocate_explanation", getExplanation()); if (targetNode != null) { @@ -320,7 +320,7 @@ public Iterator toXContentChunked(ToXContent.Params params ); } return builder; - }), nodeDecisionsToXContentChunked(nodeDecisions)); + }).append(nodeDecisionsToXContentChunked(nodeDecisions)); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java index 891818b8e68f7..5dfac293de491 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java @@ -12,9 +12,9 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContent; @@ -260,7 +260,7 @@ public String getExplanation() { @Override public Iterator toXContentChunked(ToXContent.Params params) { checkDecisionState(); - return Iterators.concat(Iterators.single((builder, p) -> { + return ChunkedToXContent.builder(params).append((builder, p) -> { if (targetNode != null) { builder.startObject("target_node"); discoveryNodeToXContent(targetNode, true, builder); @@ -289,7 +289,7 @@ public Iterator toXContentChunked(ToXContent.Params params builder.field("move_explanation", getExplanation()); } return builder; - }), nodeDecisionsToXContentChunked(nodeDecisions)); + 
}).append(nodeDecisionsToXContentChunked(nodeDecisions)); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java index 0102e58c7c1dc..a3141bff7c6e2 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java @@ -248,7 +248,7 @@ private void endArray() { addChunk((b, p) -> b.endArray()); } - public ChunkedToXContentBuilder array(String name, String... values) { + public ChunkedToXContentBuilder array(String name, String[] values) { addChunk((b, p) -> b.array(name, values)); return this; } @@ -350,6 +350,26 @@ public ChunkedToXContentBuilder field(String name, Long value) { return this; } + public ChunkedToXContentBuilder field(String name, float value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, Float value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, double value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, Double value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + public ChunkedToXContentBuilder field(String name, String value) { addChunk((b, p) -> b.field(name, value)); return this; diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index fcbe0ac2b2edb..2e78cc6f516b1 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -53,10 +53,6 @@ public static Iterator field(String name, String 
value) { return Iterators.single(((builder, params) -> builder.field(name, value))); } - public static Iterator array(String name, Iterator contents) { - return Iterators.concat(ChunkedToXContentHelper.startArray(name), contents, ChunkedToXContentHelper.endArray()); - } - /** * Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link * Iterators#single}, but still useful because it avoids any type ambiguity. diff --git a/server/src/main/java/org/elasticsearch/health/Diagnosis.java b/server/src/main/java/org/elasticsearch/health/Diagnosis.java index 41301e2d52a53..b1af4a1c383da 100644 --- a/server/src/main/java/org/elasticsearch/health/Diagnosis.java +++ b/server/src/main/java/org/elasticsearch/health/Diagnosis.java @@ -10,14 +10,12 @@ package org.elasticsearch.health; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContent; import java.util.Collection; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; @@ -78,22 +76,20 @@ public Resource(Collection nodes) { } @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator valuesIterator; + public Iterator toXContentChunked(ToXContent.Params params) { + var builder = ChunkedToXContent.builder(params); if (nodes != null) { - valuesIterator = Iterators.map(nodes.iterator(), node -> (builder, params) -> { - builder.startObject(); - builder.field(ID_FIELD, node.getId()); + return builder.array(type.displayValue, nodes.iterator(), node -> (b, p) -> { + b.startObject(); + b.field(ID_FIELD, node.getId()); if (node.getName() != null) { - 
builder.field(NAME_FIELD, node.getName()); + b.field(NAME_FIELD, node.getName()); } - builder.endObject(); - return builder; + return b.endObject(); }); } else { - valuesIterator = Iterators.map(values.iterator(), value -> (builder, params) -> builder.value(value)); + return builder.array(type.displayValue, values.toArray(String[]::new)); } - return ChunkedToXContentHelper.array(type.displayValue, valuesIterator); } @Override @@ -144,30 +140,18 @@ public String getUniqueId() { } @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator resourcesIterator; - if (affectedResources == null) { - resourcesIterator = Collections.emptyIterator(); - } else { - resourcesIterator = Iterators.flatMap(affectedResources.iterator(), s -> s.toXContentChunked(outerParams)); - } - return Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { - builder.startObject(); - builder.field("id", definition.getUniqueId()); - builder.field("cause", definition.cause); - builder.field("action", definition.action); - builder.field("help_url", definition.helpURL); - - if (affectedResources != null && affectedResources.size() > 0) { - builder.startObject("affected_resources"); - } - return builder; - }), resourcesIterator, Iterators.single((builder, params) -> { - if (affectedResources != null && affectedResources.size() > 0) { - builder.endObject(); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(ob -> { + ob.append((b, p) -> { + b.field("id", definition.getUniqueId()); + b.field("cause", definition.cause); + b.field("action", definition.action); + b.field("help_url", definition.helpURL); + return b; + }); + if (affectedResources != null && affectedResources.isEmpty() == false) { + ob.object("affected_resources", affectedResources.iterator(), ChunkedToXContentBuilder::append); } - builder.endObject(); - return builder; - })); + }); } } diff --git 
a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index 6944ac74c8115..1a84abd9f7c16 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java @@ -9,11 +9,11 @@ package org.elasticsearch.health; -import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.xcontent.ToXContent; -import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -26,33 +26,22 @@ public record HealthIndicatorResult( List diagnosisList ) implements ChunkedToXContentObject { @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator diagnosisIterator; - if (diagnosisList == null) { - diagnosisIterator = Collections.emptyIterator(); - } else { - diagnosisIterator = Iterators.flatMap(diagnosisList.iterator(), s -> s.toXContentChunked(outerParams)); - } - return Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { - builder.startObject(); - builder.field("status", status.xContentValue()); - builder.field("symptom", symptom); - if (details != null && HealthIndicatorDetails.EMPTY.equals(details) == false) { - builder.field("details", details, params); - } - if (impacts != null && impacts.isEmpty() == false) { - builder.field("impacts", impacts); - } - if (diagnosisList != null && diagnosisList.isEmpty() == false) { - builder.startArray("diagnosis"); - } - return builder; - }), diagnosisIterator, Iterators.single((builder, params) -> { + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(ob -> { + ob.append((b, p) -> { + b.field("status", 
status.xContentValue()); + b.field("symptom", symptom); + if (details != null && HealthIndicatorDetails.EMPTY.equals(details) == false) { + b.field("details", details, p); + } + if (impacts != null && impacts.isEmpty() == false) { + b.field("impacts", impacts); + } + return b; + }); if (diagnosisList != null && diagnosisList.isEmpty() == false) { - builder.endArray(); + ob.array("diagnosis", diagnosisList.iterator(), ChunkedToXContentBuilder::append); } - builder.endObject(); - return builder; - })); + }); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java index 05da1f4784649..316f621e80669 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestMetadata.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -98,8 +98,8 @@ public static IngestMetadata fromXContent(XContentParser parser) throws IOExcept } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.array(PIPELINES_FIELD.getPreferredName(), pipelines.values().iterator()); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).array(PIPELINES_FIELD.getPreferredName(), pipelines.values().iterator()); } @Override diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java index 55753f2827d2a..bde2d55ef2940 100644 --- 
a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetadata.java @@ -17,12 +17,11 @@ import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -552,11 +551,8 @@ public static NamedDiff readDiffFrom(StreamInput in) throws IOE } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return Iterators.concat( - Iterators.single((builder, params) -> builder.field("last_allocation_id", lastAllocationId)), - ChunkedToXContentHelper.array("tasks", tasks.values().iterator()) - ); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).field("last_allocation_id", lastAllocationId).array("tasks", tasks.values().iterator()); } public static Builder builder() { diff --git a/server/src/main/java/org/elasticsearch/script/ScriptStats.java b/server/src/main/java/org/elasticsearch/script/ScriptStats.java index 9ac060ad063a0..f24052ef7e3a9 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptStats.java @@ -10,13 +10,11 @@ package org.elasticsearch.script; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.collect.Iterators; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -27,7 +25,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.collect.Iterators.single; import static org.elasticsearch.script.ScriptContextStats.Fields.COMPILATIONS_HISTORY; import static org.elasticsearch.script.ScriptStats.Fields.CACHE_EVICTIONS; import static org.elasticsearch.script.ScriptStats.Fields.COMPILATIONS; @@ -193,28 +190,19 @@ public ScriptCacheStats toScriptCacheStats() { } @Override - public Iterator toXContentChunked(ToXContent.Params outerParams) { - return Iterators.concat( - ChunkedToXContentHelper.startObject(SCRIPT_STATS), - ChunkedToXContentHelper.field(COMPILATIONS, compilations), - ChunkedToXContentHelper.field(CACHE_EVICTIONS, cacheEvictions), - ChunkedToXContentHelper.field(COMPILATION_LIMIT_TRIGGERED, compilationLimitTriggered), - single((builder, params) -> { - if (compilationsHistory != null && compilationsHistory.areTimingsEmpty() == false) { - builder.startObject(COMPILATIONS_HISTORY); - compilationsHistory.toXContent(builder, params); - builder.endObject(); - } - if (cacheEvictionsHistory != null && cacheEvictionsHistory.areTimingsEmpty() == false) { - builder.startObject(COMPILATIONS_HISTORY); - cacheEvictionsHistory.toXContent(builder, params); - builder.endObject(); - } - return builder; - }), - ChunkedToXContentHelper.array(CONTEXTS, contextStats.iterator()), - ChunkedToXContentHelper.endObject() - ); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(SCRIPT_STATS, ob -> { + ob.field(COMPILATIONS, compilations); + 
ob.field(CACHE_EVICTIONS, cacheEvictions); + ob.field(COMPILATION_LIMIT_TRIGGERED, compilationLimitTriggered); + if (compilationsHistory != null && compilationsHistory.areTimingsEmpty() == false) { + ob.xContentObject(COMPILATIONS_HISTORY, compilationsHistory); + } + if (cacheEvictionsHistory != null && cacheEvictionsHistory.areTimingsEmpty() == false) { + ob.xContentObject(COMPILATIONS_HISTORY, cacheEvictionsHistory); + } + ob.array(CONTEXTS, contextStats.iterator()); + }); } static final class Fields { diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 896dd7f999949..fe133cbac335d 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.SimpleRefCounted; @@ -285,26 +284,22 @@ public static final class Fields { @Override public Iterator toXContentChunked(ToXContent.Params params) { assert hasReferences(); - return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> { - boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); + return ChunkedToXContent.builder(params).object(Fields.HITS, ob -> { + boolean totalHitAsInt = ob.params().paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value(); - b.field(Fields.TOTAL, total); + ob.field(Fields.TOTAL, totalHits == null ? 
-1 : totalHits.value()); } else if (totalHits != null) { - b.startObject(Fields.TOTAL); - b.field("value", totalHits.value()); - b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); - b.endObject(); + ob.append((b, p) -> { + b.startObject(Fields.TOTAL); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); + return b.endObject(); + }); } - return b; - }), Iterators.single((b, p) -> { - if (Float.isNaN(maxScore)) { - b.nullField(Fields.MAX_SCORE); - } else { - b.field(Fields.MAX_SCORE, maxScore); - } - return b; - }), ChunkedToXContentHelper.array(Fields.HITS, Iterators.forArray(hits)), ChunkedToXContentHelper.endObject()); + + ob.field(Fields.MAX_SCORE, Float.isNaN(maxScore) ? null : maxScore); + ob.array(Fields.HITS, Iterators.forArray(hits)); + }); } @Override diff --git a/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java b/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java index 91936ea8b9092..cba0dacccd8bd 100644 --- a/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java +++ b/server/src/test/java/org/elasticsearch/health/HealthIndicatorResultTests.java @@ -198,8 +198,8 @@ public void testChunkCount() { diagnosisList.add(diagnosis2); HealthIndicatorResult result = new HealthIndicatorResult(name, status, symptom, details, impacts, diagnosisList); - // -> each Diagnosis yields 5 chunks => 10 chunks from both diagnosis - // -> HealthIndicatorResult surrounds the diagnosis list by 2 chunks - AbstractChunkedSerializingTestCase.assertChunkCount(result, ignored -> 12); + // -> each Diagnosis yields 6 chunks => 12 chunks from both diagnosis + // -> HealthIndicatorResult surrounds the diagnosis list by 5 chunks + AbstractChunkedSerializingTestCase.assertChunkCount(result, ignored -> (6 * 2) + 5); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java index 902c69cef558e..5c63a60103139 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -50,7 +50,7 @@ public ChatCompletionResults(StreamInput in) throws IOException { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(COMPLETION, results.iterator()); + return ChunkedToXContent.builder(params).array(COMPLETION, results.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java index 187b186fcd91d..c961050acefdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; @@ -79,7 +79,7 @@ public List getChunkedResults() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(FIELD_NAME, chunkedResults.iterator()); + return ChunkedToXContent.builder(params).array(FIELD_NAME, chunkedResults.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java index cc245c40c51e3..6bd66664068d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; @@ -64,7 +64,7 @@ public InferenceChunkedTextEmbeddingByteResults(StreamInput in) throws IOExcepti @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(FIELD_NAME, chunks.iterator()); + return ChunkedToXContent.builder(params).array(FIELD_NAME, 
chunks.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java index 4b4d77cd3f043..369f22a807913 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; @@ -77,7 +77,7 @@ public static InferenceChunkedTextEmbeddingFloatResults ofMlResults(MlChunkedTex @Override public Iterator toXContentChunked(ToXContent.Params params) { // TODO add isTruncated flag - return ChunkedToXContentHelper.array(FIELD_NAME, chunks.iterator()); + return ChunkedToXContent.builder(params).array(FIELD_NAME, chunks.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java index 16dca7b04d526..c1be1ce265f6b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java @@ 
-13,7 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.ToXContent; @@ -62,7 +62,7 @@ public int getFirstEmbeddingSize() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(TEXT_EMBEDDING_BYTES, embeddings.iterator()); + return ChunkedToXContent.builder(params).array(TEXT_EMBEDDING_BYTES, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java index 9f9bdfec7cfae..8551250348928 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -103,7 +103,7 @@ public int getFirstEmbeddingSize() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(TEXT_EMBEDDING, 
embeddings.iterator()); + return ChunkedToXContent.builder(params).array(TEXT_EMBEDDING, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java index e331cdbc59358..9c764babe33fc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; @@ -174,7 +174,7 @@ public List getRankedDocs() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(RERANK, rankedDocs.iterator()); + return ChunkedToXContent.builder(params).array(RERANK, rankedDocs.iterator()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java index dd8229c604ecb..318a292b47730 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -72,7 +72,7 @@ public static SparseEmbeddingResults of(List results @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.array(SPARSE_EMBEDDING, embeddings.iterator()); + return ChunkedToXContent.builder(params).array(SPARSE_EMBEDDING, embeddings.iterator()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index a685687e8bfc6..d98613f1817ab 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -9,11 +9,10 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; @@ -168,24 +167,24 @@ public DriverSleeps sleeps() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { - b.timestampFieldsFromUnixEpochMillis("start_millis", "start", 
startMillis); - b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); - b.field("took_nanos", tookNanos); - if (b.humanReadable()) { - b.field("took_time", TimeValue.timeValueNanos(tookNanos)); - } - b.field("cpu_nanos", cpuNanos); - if (b.humanReadable()) { - b.field("cpu_time", TimeValue.timeValueNanos(cpuNanos)); - } - b.field("iterations", iterations); - return b; - }), - ChunkedToXContentHelper.array("operators", operators.iterator()), - Iterators.single((b, p) -> b.field("sleeps", sleeps)), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params).object(ob -> { + ob.append((b, p) -> { + b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); + b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); + b.field("took_nanos", tookNanos); + if (b.humanReadable()) { + b.field("took_time", TimeValue.timeValueNanos(tookNanos)); + } + b.field("cpu_nanos", cpuNanos); + if (b.humanReadable()) { + b.field("cpu_time", TimeValue.timeValueNanos(cpuNanos)); + } + b.field("iterations", iterations); + return b; + }); + ob.array("operators", operators.iterator()); + ob.field("sleeps", sleeps); + }); } @Override diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java index 24f2f287f4cd6..5d32c39e350a5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java @@ -11,12 +11,13 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import 
org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; +import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -176,89 +177,56 @@ public long getTotalSamples() { @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // change casing from Camel Case to Snake Case (requires updates in Kibana as well) @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.array( - "Edges", - Iterators.flatMap( - edges.iterator(), - perNodeEdges -> Iterators.concat( - ChunkedToXContentHelper.startArray(), - Iterators.map(perNodeEdges.entrySet().iterator(), edge -> (b, p) -> b.value(edge.getValue())), - ChunkedToXContentHelper.endArray() - ) - ) - ), - ChunkedToXContentHelper.array("FileID", Iterators.map(fileIds.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("FrameType", Iterators.map(frameTypes.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("Inline", Iterators.map(inlineFrames.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("ExeFilename", Iterators.map(fileNames.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("AddressOrLine", Iterators.map(addressOrLines.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.array("FunctionName", Iterators.map(functionNames.iterator(), e -> (b, p) -> b.value(e))), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("FunctionOffset"); - for (int functionOffset : functionOffsets) { - b.value(functionOffset); - } - return b.endArray(); - }), - ChunkedToXContentHelper.array("SourceFilename", Iterators.map(sourceFileNames.iterator(), e -> (b, p) -> b.value(e))), - 
ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("SourceLine"); - for (int sourceLine : sourceLines) { - b.value(sourceLine); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("CountInclusive"); - for (long countInclusive : countInclusive) { - b.value(countInclusive); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("CountExclusive"); - for (long c : countExclusive) { - b.value(c); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCO2TonsInclusive"); - for (double co2Tons : annualCO2TonsInclusive) { - // write as raw value - we need direct control over the output representation (here: limit to 4 decimal places) - b.rawValue(NumberUtils.doubleToString(co2Tons)); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCO2TonsExclusive"); - for (double co2Tons : annualCO2TonsExclusive) { - b.rawValue(NumberUtils.doubleToString(co2Tons)); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCostsUSDInclusive"); - for (double costs : annualCostsUSDInclusive) { - b.rawValue(NumberUtils.doubleToString(costs)); - } - return b.endArray(); - }), - ChunkedToXContentHelper.singleChunk((b, p) -> { - b.startArray("AnnualCostsUSDExclusive"); - for (double costs : annualCostsUSDExclusive) { - b.rawValue(NumberUtils.doubleToString(costs)); - } - return b.endArray(); - }), - Iterators.single((b, p) -> b.field("Size", size)), - Iterators.single((b, p) -> b.field("SamplingRate", samplingRate)), - Iterators.single((b, p) -> b.field("SelfCPU", selfCPU)), - Iterators.single((b, p) -> b.field("TotalCPU", totalCPU)), - Iterators.single((b, p) -> b.field("TotalSamples", totalSamples)), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params).object(ob -> { + ob.array("Edges", edges.iterator(), (eb, 
e) -> eb.array(intValues(e.values()))); + ob.array("FileID", fileIds.toArray(String[]::new)); + ob.array("FrameType", intValues(frameTypes)); + ob.array("Inline", inlineFrames.iterator(), e -> (b, p) -> b.value(e)); + ob.array("ExeFilename", fileNames.toArray(String[]::new)); + ob.array("AddressOrLine", intValues(addressOrLines)); + ob.array("FunctionName", functionNames.toArray(String[]::new)); + ob.array("FunctionOffset", intValues(functionOffsets)); + ob.array("SourceFilename", sourceFileNames.toArray(String[]::new)); + ob.array("SourceLine", intValues(sourceLines)); + ob.array("CountInclusive", longValues(countInclusive)); + ob.array("CountExclusive", longValues(countExclusive)); + ob.array("AnnualCO2TonsInclusive", doubleValues(annualCO2TonsInclusive)); + ob.array("AnnualCO2TonsExclusive", doubleValues(annualCO2TonsExclusive)); + ob.array("AnnualCostsUSDInclusive", doubleValues(annualCostsUSDInclusive)); + ob.array("AnnualCostsUSDExclusive", doubleValues(annualCostsUSDExclusive)); + ob.field("Size", size); + ob.field("SamplingRate", samplingRate); + ob.field("SelfCPU", selfCPU); + ob.field("TotalCPU", totalCPU); + ob.field("TotalSamples", totalSamples); + }); + } + + private static Iterator intValues(Collection values) { + return Iterators.single((b, p) -> { + for (Integer i : values) { + b.value(i); + } + return b; + }); + } + + private static Iterator longValues(Collection values) { + return Iterators.single((b, p) -> { + for (Long l : values) { + b.value(l); + } + return b; + }); + } + + private static Iterator doubleValues(Collection values) { + return Iterators.single((b, p) -> { + for (Double d : values) { + // write as raw value - we need direct control over the output representation (here: limit to 4 decimal places) + b.rawValue(NumberUtils.doubleToString(d)); + } + return b; + }); } } From eae3a426e7e5074f153fd665829303930a5849d3 Mon Sep 17 00:00:00 2001 From: Imad Saddik <79410781+ImadSaddik@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:22:29 
+0100 Subject: [PATCH 48/67] Fixed hyperlink in search.asciidoc (#115156) --- docs/reference/search/search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index 501d645665a02..2ad407b4ae1e4 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -38,7 +38,7 @@ must have the `read` index privilege for the alias's data streams or indices. Allows you to execute a search query and get back search hits that match the query. You can provide search queries using the <> or <>. +query string parameter>> or <>. [[search-search-api-path-params]] ==== {api-path-parms-title} From 9062154462cabf79dd77e24204783868249cfb02 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 21 Oct 2024 12:22:06 -0400 Subject: [PATCH 49/67] ESQL: Fix `REVERSE` with backspace character (#115245) * ESQL: Fix `REVERSE` with backspace character If the text contains a backspace character aka `0x28` aka ctrl-H then we should use the slow reverse path. This is going to be quite rare but our test data is sure good at making rare, fun stuff. 
Closes #115228 Closes #115227 Closes #114372 --- docs/changelog/115245.yaml | 8 ++++++++ muted-tests.yml | 6 ------ .../expression/function/scalar/string/Reverse.java | 13 ++++++------- 3 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/115245.yaml diff --git a/docs/changelog/115245.yaml b/docs/changelog/115245.yaml new file mode 100644 index 0000000000000..294328567c3aa --- /dev/null +++ b/docs/changelog/115245.yaml @@ -0,0 +1,8 @@ +pr: 115245 +summary: "ESQL: Fix `REVERSE` with backspace character" +area: ES|QL +type: bug +issues: + - 114372 + - 115227 + - 115228 diff --git a/muted-tests.yml b/muted-tests.yml index 4dc177ef001fd..4f3ba742d16fa 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -346,12 +346,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/115129 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT issue: https://github.com/elastic/elasticsearch/issues/115135 -- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests - method: testEvaluateInManyThreads {TestCase=} - issue: https://github.com/elastic/elasticsearch/issues/115227 -- class: org.elasticsearch.xpack.esql.expression.function.scalar.string.ReverseTests - method: testEvaluateInManyThreads {TestCase=} - issue: https://github.com/elastic/elasticsearch/issues/115228 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java index bf4e47d8d0de4..e161566838cd9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -79,8 +78,6 @@ protected TypeResolution resolveType() { /** * Reverses a unicode string, keeping grapheme clusters together - * @param str - * @return */ public static String reverseStringWithUnicodeCharacters(String str) { BreakIterator boundary = BreakIterator.getCharacterInstance(Locale.ROOT); @@ -100,10 +97,12 @@ public static String reverseStringWithUnicodeCharacters(String str) { return reversed.toString(); } - private static boolean isOneByteUTF8(BytesRef ref) { + private static boolean reverseBytesIsReverseUnicode(BytesRef ref) { int end = ref.offset + ref.length; for (int i = ref.offset; i < end; i++) { - if (ref.bytes[i] < 0) { + if (ref.bytes[i] < 0 // Anything encoded in multibyte utf-8 + || ref.bytes[i] == 0x28 // Backspace + ) { return false; } } @@ -112,13 +111,13 @@ private static boolean isOneByteUTF8(BytesRef ref) { @Evaluator static BytesRef process(BytesRef val) { - if (isOneByteUTF8(val)) { + if (reverseBytesIsReverseUnicode(val)) { // this is the fast path. we know we can just reverse the bytes. 
BytesRef reversed = BytesRef.deepCopyOf(val); reverseArray(reversed.bytes, reversed.offset, reversed.length); return reversed; } - return BytesRefs.toBytesRef(reverseStringWithUnicodeCharacters(val.utf8ToString())); + return new BytesRef(reverseStringWithUnicodeCharacters(val.utf8ToString())); } @Override From f5ceafff4f3e987a128fd2ff2edbd3e0443a3c5b Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 21 Oct 2024 18:30:23 +0200 Subject: [PATCH 50/67] [Build] Fix checkstyle exclusions on windows (#115185) --- .../gradle/util/PlatformUtils.java | 23 +++++++++++++++++++ x-pack/plugin/esql/build.gradle | 7 +++--- x-pack/plugin/kql/build.gradle | 3 ++- 3 files changed, 29 insertions(+), 4 deletions(-) create mode 100644 build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java b/build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java new file mode 100644 index 0000000000000..2f093a19032c8 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/util/PlatformUtils.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.util; + +import java.util.stream.Collectors; + +public class PlatformUtils { + + public static String normalize(String input) { + return input.lines() + .map(it -> it.replace('\\', '/')) + .map(it -> it.replaceAll("\\d+\\.\\d\\ds", "0.00s")) + .map(it -> it.replace("file:/./", "file:./")) + .collect(Collectors.joining("\n")); + } +} diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index c8d704cd2b8bf..766d0c0f13892 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -1,6 +1,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.internal.util.SourceDirectoryCommandLineArgumentProvider; +import static org.elasticsearch.gradle.util.PlatformUtils.normalize apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -56,7 +57,7 @@ def generatedSourceDir = projectDirectory.dir("src/main/generated") tasks.named("compileJava").configure { options.compilerArgumentProviders.add(new SourceDirectoryCommandLineArgumentProvider(generatedSourceDir)) // IntelliJ sticks generated files here and we can't stop it.... 
- exclude { it.file.toString().contains("src/main/generated-src/generated") } + exclude { normalize(it.file.toString()).contains("src/main/generated-src/generated") } } interface Injected { @@ -262,8 +263,8 @@ tasks.register("regen") { tasks.named("spotlessJava") { dependsOn stringTemplates } tasks.named('checkstyleMain').configure { excludes = [ "**/*.java.st" ] - exclude { it.file.toString().contains("src/main/generated-src/generated") } - exclude { it.file.toString().contains("src/main/generated") } + exclude { normalize(it.file.toString()).contains("src/main/generated-src/generated") } + exclude { normalize(it.file.toString()).contains("src/main/generated") } } def prop(Type, type, TYPE, BYTES, Array) { diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index d1c949834b021..198099329c7c0 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.internal.info.BuildParams +import static org.elasticsearch.gradle.util.PlatformUtils.normalize apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -54,7 +55,7 @@ pluginManager.withPlugin('com.diffplug.spotless') { } } tasks.named('checkstyleMain').configure { - exclude { it.file.toString().contains("src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase") } + exclude { normalize(it.file.toString()).contains("src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase") } } tasks.register("cleanGenerated", Delete) { From 6b6c3670c2da3d8e8f886ae9c5ee0836ef16eb68 Mon Sep 17 00:00:00 2001 From: Sylvain Wallez Date: Mon, 21 Oct 2024 19:48:34 +0200 Subject: [PATCH 51/67] ESQL: Add support for multivalue fields in Arrow output (#114774) --- docs/changelog/114774.yaml | 5 + x-pack/plugin/esql/arrow/build.gradle | 1 + .../xpack/esql/arrow/ArrowResponse.java | 74 +++-- .../xpack/esql/arrow/BlockConverter.java | 214 ++++++++++----- .../xpack/esql/arrow/ArrowResponseTests.java | 252 
+++++++++++++++--- 5 files changed, 431 insertions(+), 115 deletions(-) create mode 100644 docs/changelog/114774.yaml diff --git a/docs/changelog/114774.yaml b/docs/changelog/114774.yaml new file mode 100644 index 0000000000000..1becfe427fda0 --- /dev/null +++ b/docs/changelog/114774.yaml @@ -0,0 +1,5 @@ +pr: 114774 +summary: "ESQL: Add support for multivalue fields in Arrow output" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/arrow/build.gradle b/x-pack/plugin/esql/arrow/build.gradle index 20c877a12bf0d..fac0bd0a77452 100644 --- a/x-pack/plugin/esql/arrow/build.gradle +++ b/x-pack/plugin/esql/arrow/build.gradle @@ -26,6 +26,7 @@ dependencies { testImplementation project(':test:framework') testImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0') + testImplementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${versions.jackson}") } tasks.named("dependencyLicenses").configure { diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java index 7a8328060a390..208d3308d508b 100644 --- a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java @@ -17,6 +17,7 @@ import org.apache.arrow.vector.ipc.message.MessageSerializer; import org.apache.arrow.vector.types.Types.MinorType; import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.arrow.vector.types.pojo.Schema; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; @@ -44,6 +45,7 @@ public class ArrowResponse implements ChunkedRestResponseBodyPart, Releasable { public static class Column { private final BlockConverter converter; private final String name; + private boolean multivalued; public 
Column(String esqlType, String name) { this.converter = ESQL_CONVERTERS.get(esqlType); @@ -61,20 +63,24 @@ public Column(String esqlType, String name) { public ArrowResponse(List columns, List pages) { this.columns = columns; + // Find multivalued columns + int colSize = columns.size(); + for (int col = 0; col < colSize; col++) { + for (Page page : pages) { + if (page.getBlock(col).mayHaveMultivaluedFields()) { + columns.get(col).multivalued = true; + break; + } + } + } + currentSegment = new SchemaResponse(this); List rest = new ArrayList<>(pages.size()); - for (int p = 0; p < pages.size(); p++) { - var page = pages.get(p); + + for (Page page : pages) { rest.add(new PageResponse(this, page)); - // Multivalued fields are not supported yet. - for (int b = 0; b < page.getBlockCount(); b++) { - if (page.getBlock(b).mayHaveMultivaluedFields()) { - throw new IllegalArgumentException( - "ES|QL response field [" + columns.get(b).name + "] is multi-valued. This isn't supported yet by the Arrow format" - ); - } - } } + rest.add(new EndResponse(this)); segments = rest.iterator(); } @@ -185,6 +191,9 @@ public void close() {} * @see IPC Streaming Format */ private static class SchemaResponse extends ResponseSegment { + + private static final FieldType LIST_FIELD_TYPE = FieldType.nullable(MinorType.LIST.getType()); + private boolean done = false; SchemaResponse(ArrowResponse response) { @@ -204,7 +213,20 @@ protected void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws I } private Schema arrowSchema() { - return new Schema(response.columns.stream().map(c -> new Field(c.name, c.converter.arrowFieldType(), List.of())).toList()); + return new Schema(response.columns.stream().map(c -> { + var fieldType = c.converter.arrowFieldType(); + if (c.multivalued) { + // A variable-sized list is a vector of offsets and a child vector of values + // See https://arrow.apache.org/docs/format/Columnar.html#variable-size-list-layout + var listType = new FieldType(true, 
LIST_FIELD_TYPE.getType(), null, fieldType.getMetadata()); + // Value vector is non-nullable (ES|QL multivalues cannot contain nulls). + var valueType = new FieldType(false, fieldType.getType(), fieldType.getDictionary(), null); + // The nested vector is named "$data$", following what the Arrow/Java library does. + return new Field(c.name, listType, List.of(new Field("$data$", valueType, null))); + } else { + return new Field(c.name, fieldType, null); + } + }).toList()); } } @@ -257,7 +279,14 @@ protected void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws I @Override public void write(ArrowBuf buffer) throws IOException { - extraPosition += bufWriters.get(bufIdx++).write(out); + var len = bufWriters.get(bufIdx++).write(out); + // Consistency check + if (len != buffer.writerIndex()) { + throw new IllegalStateException( + "Buffer [" + (bufIdx - 1) + "]: wrote [" + len + "] bytes, but expected [" + buffer.writerIndex() + "]" + ); + } + extraPosition += len; } @Override @@ -277,11 +306,26 @@ public long align() throws IOException { // Create Arrow buffers for each of the blocks in this page for (int b = 0; b < page.getBlockCount(); b++) { - var converter = response.columns.get(b).converter; + var column = response.columns.get(b); + var converter = column.converter; Block block = page.getBlock(b); - nodes.add(new ArrowFieldNode(block.getPositionCount(), converter.nullValuesCount(block))); - converter.convert(block, bufs, bufWriters); + if (column.multivalued) { + // List node. + nodes.add(new ArrowFieldNode(block.getPositionCount(), converter.nullValuesCount(block))); + // Value vector, does not contain nulls. 
+ nodes.add(new ArrowFieldNode(BlockConverter.valueCount(block), 0)); + } else { + nodes.add(new ArrowFieldNode(block.getPositionCount(), converter.nullValuesCount(block))); + } + converter.convert(block, column.multivalued, bufs, bufWriters); + } + + // Consistency check + if (bufs.size() != bufWriters.size()) { + throw new IllegalStateException( + "Inconsistent Arrow buffers: [" + bufs.size() + "] buffers and [" + bufWriters.size() + "] writers" + ); } // Create the batch and serialize it diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java index 0a65792ab8e13..2a305cfdbc503 100644 --- a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java @@ -71,10 +71,11 @@ public interface BufWriter { /** * Convert a block into Arrow buffers. * @param block the ESQL block + * @param multivalued is this column multivalued? This block may not, but some blocks in that column are. 
* @param bufs arrow buffers, used to track sizes * @param bufWriters buffer writers, that will do the actual work of writing the data */ - public abstract void convert(Block block, List bufs, List bufWriters); + public abstract void convert(Block block, boolean multivalued, List bufs, List bufWriters); /** * Conversion of Double blocks @@ -86,28 +87,31 @@ public AsFloat64(String esqlType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { DoubleBlock block = (DoubleBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); - bufs.add(dummyArrowBuf(vectorLength(block))); + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - return BlockConverter.writeZeroes(out, vectorLength(block)); + return BlockConverter.writeZeroes(out, vectorByteSize(block)); } // TODO could we "just" get the memory of the array and dump it? 
- int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); for (int i = 0; i < count; i++) { out.writeDoubleLE(block.getDouble(i)); } - return vectorLength(block); + return (long) count * Double.BYTES; }); } - private static int vectorLength(DoubleBlock b) { - return Double.BYTES * b.getPositionCount(); + private static int vectorByteSize(DoubleBlock b) { + return Double.BYTES * BlockConverter.valueCount(b); } } @@ -121,28 +125,31 @@ public AsInt32(String esqlType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { IntBlock block = (IntBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); - bufs.add(dummyArrowBuf(vectorLength(block))); + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - return BlockConverter.writeZeroes(out, vectorLength(block)); + return BlockConverter.writeZeroes(out, vectorByteSize(block)); } // TODO could we "just" get the memory of the array and dump it? 
- int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); for (int i = 0; i < count; i++) { out.writeIntLE(block.getInt(i)); } - return vectorLength(block); + return (long) count * Integer.BYTES; }); } - private static int vectorLength(IntBlock b) { - return Integer.BYTES * b.getPositionCount(); + private static int vectorByteSize(Block b) { + return Integer.BYTES * BlockConverter.valueCount(b); } } @@ -159,27 +166,31 @@ protected AsInt64(String esqlType, Types.MinorType minorType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { LongBlock block = (LongBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); - bufs.add(dummyArrowBuf(vectorLength(block))); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); + + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - return BlockConverter.writeZeroes(out, vectorLength(block)); + return BlockConverter.writeZeroes(out, vectorByteSize(block)); } // TODO could we "just" get the memory of the array and dump it? 
- int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); for (int i = 0; i < count; i++) { out.writeLongLE(block.getLong(i)); } - return vectorLength(block); + return (long) count * Long.BYTES; }); } - private static int vectorLength(LongBlock b) { - return Long.BYTES * b.getPositionCount(); + private static int vectorByteSize(LongBlock b) { + return Long.BYTES * BlockConverter.valueCount(b); } } @@ -192,13 +203,17 @@ public AsBoolean(String esqlType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { BooleanBlock block = (BooleanBlock) b; - accumulateVectorValidity(bufs, bufWriters, block); - bufs.add(dummyArrowBuf(vectorLength(block))); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); + + bufs.add(dummyArrowBuf(vectorByteSize(block))); bufWriters.add(out -> { - int count = block.getPositionCount(); + int count = BlockConverter.valueCount(block); BitSet bits = new BitSet(); // Only set the bits that are true, writeBitSet will take @@ -215,8 +230,8 @@ public void convert(Block b, List bufs, List bufWriters) { }); } - private static int vectorLength(BooleanBlock b) { - return BlockConverter.bitSetLength(b.getPositionCount()); + private static int vectorByteSize(BooleanBlock b) { + return BlockConverter.bitSetLength(BlockConverter.valueCount(b)); } } @@ -230,27 +245,30 @@ public BytesRefConverter(String esqlType, Types.MinorType minorType) { } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { BytesRefBlock block = (BytesRefBlock) b; - BlockConverter.accumulateVectorValidity(bufs, bufWriters, block); + if (multivalued) { + addListOffsets(bufs, bufWriters, block); + } + accumulateVectorValidity(bufs, bufWriters, block, multivalued); // Offsets vector 
- bufs.add(dummyArrowBuf(offsetVectorLength(block))); + bufs.add(dummyArrowBuf(offsetvectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { - var count = block.getPositionCount() + 1; + var count = valueCount(block) + 1; for (int i = 0; i < count; i++) { out.writeIntLE(0); } - return offsetVectorLength(block); + return offsetvectorByteSize(block); } // TODO could we "just" get the memory of the array and dump it? BytesRef scratch = new BytesRef(); int offset = 0; - for (int i = 0; i < block.getPositionCount(); i++) { + for (int i = 0; i < valueCount(block); i++) { out.writeIntLE(offset); // FIXME: add a ByteRefsVector.getLength(position): there are some cases // where getBytesRef will allocate, which isn't needed here. @@ -259,11 +277,11 @@ public void convert(Block b, List bufs, List bufWriters) { offset += v.length; } out.writeIntLE(offset); - return offsetVectorLength(block); + return offsetvectorByteSize(block); }); // Data vector - bufs.add(BlockConverter.dummyArrowBuf(dataVectorLength(block))); + bufs.add(BlockConverter.dummyArrowBuf(dataVectorByteSize(block))); bufWriters.add(out -> { if (block.areAllValuesNull()) { @@ -273,7 +291,7 @@ public void convert(Block b, List bufs, List bufWriters) { // TODO could we "just" get the memory of the array and dump it? 
BytesRef scratch = new BytesRef(); long length = 0; - for (int i = 0; i < block.getPositionCount(); i++) { + for (int i = 0; i < valueCount(block); i++) { BytesRef v = block.getBytesRef(i, scratch); out.write(v.bytes, v.offset, v.length); @@ -283,11 +301,11 @@ public void convert(Block b, List bufs, List bufWriters) { }); } - private static int offsetVectorLength(BytesRefBlock block) { - return Integer.BYTES * (block.getPositionCount() + 1); + private static int offsetvectorByteSize(BytesRefBlock block) { + return Integer.BYTES * (valueCount(block) + 1); } - private int dataVectorLength(BytesRefBlock block) { + private int dataVectorByteSize(BytesRefBlock block) { if (block.areAllValuesNull()) { return 0; } @@ -296,7 +314,7 @@ private int dataVectorLength(BytesRefBlock block) { int length = 0; BytesRef scratch = new BytesRef(); - for (int i = 0; i < block.getPositionCount(); i++) { + for (int i = 0; i < valueCount(block); i++) { BytesRef v = block.getBytesRef(i, scratch); length += v.length; } @@ -323,10 +341,10 @@ public TransformedBytesRef(String esqlType, Types.MinorType minorType, BiFunctio } @Override - public void convert(Block b, List bufs, List bufWriters) { + public void convert(Block b, boolean multivalued, List bufs, List bufWriters) { BytesRefBlock block = (BytesRefBlock) b; try (BytesRefBlock transformed = transformValues(block)) { - super.convert(transformed, bufs, bufWriters); + super.convert(transformed, multivalued, bufs, bufWriters); } } @@ -336,20 +354,40 @@ public void convert(Block b, List bufs, List bufWriters) { private BytesRefBlock transformValues(BytesRefBlock block) { try (BytesRefBlock.Builder builder = block.blockFactory().newBytesRefBlockBuilder(block.getPositionCount())) { BytesRef scratch = new BytesRef(); - for (int i = 0; i < block.getPositionCount(); i++) { - if (block.isNull(i)) { - builder.appendNull(); - } else { - BytesRef bytes = block.getBytesRef(i, scratch); - if (bytes.length != 0) { - bytes = valueConverter.apply(bytes, 
scratch); + if (block.mayHaveMultivaluedFields() == false) { + for (int pos = 0; pos < valueCount(block); pos++) { + if (block.isNull(pos)) { + builder.appendNull(); + } else { + convertAndAppend(builder, block, pos, scratch); + } + } + } else { + for (int pos = 0; pos < block.getPositionCount(); pos++) { + if (block.isNull(pos)) { + builder.appendNull(); + } else { + builder.beginPositionEntry(); + int startPos = block.getFirstValueIndex(pos); + int lastPos = block.getFirstValueIndex(pos + 1); + for (int valuePos = startPos; valuePos < lastPos; valuePos++) { + convertAndAppend(builder, block, valuePos, scratch); + } + builder.endPositionEntry(); } - builder.appendBytesRef(bytes); } } return builder.build(); } } + + private void convertAndAppend(BytesRefBlock.Builder builder, BytesRefBlock block, int position, BytesRef scratch) { + BytesRef bytes = block.getBytesRef(position, scratch); + if (bytes.length != 0) { + bytes = valueConverter.apply(bytes, scratch); + } + builder.appendBytesRef(bytes); + } } public static class AsVarChar extends BytesRefConverter { @@ -370,7 +408,7 @@ public AsNull(String esqlType) { } @Override - public void convert(Block block, List bufs, List bufWriters) { + public void convert(Block block, boolean multivalued, List bufs, List bufWriters) { // Null vector in arrow has no associated buffers // See https://arrow.apache.org/docs/format/Columnar.html#null-layout } @@ -386,15 +424,38 @@ private static int bitSetLength(int totalValues) { return (totalValues + 7) / 8; } - private static void accumulateVectorValidity(List bufs, List bufWriters, Block b) { - bufs.add(dummyArrowBuf(bitSetLength(b.getPositionCount()))); + /** + * Get the value count for a block. For single-valued blocks this is the same as the position count. + * For multivalued blocks, this is the flattened number of items. 
+ */ + static int valueCount(Block block) { + int result = block.getFirstValueIndex(block.getPositionCount()); + + // firstValueIndex is always zero for all-null blocks. + if (result == 0 && block.areAllValuesNull()) { + result = block.getPositionCount(); + } + + return result; + } + + private static void accumulateVectorValidity(List bufs, List bufWriters, Block b, boolean multivalued) { + // If that block is in a multivalued-column, validities are output in the parent Arrow List buffer (values themselves + // do not contain nulls per docvalues limitations). + if (multivalued || b.mayHaveNulls() == false) { + // Arrow IPC allows a compact form for "all true" validities using an empty buffer. + bufs.add(dummyArrowBuf(0)); + bufWriters.add(w -> 0); + return; + } + + int valueCount = b.getPositionCount(); + bufs.add(dummyArrowBuf(bitSetLength(valueCount))); bufWriters.add(out -> { - if (b.mayHaveNulls() == false) { - return writeAllTrueValidity(out, b.getPositionCount()); - } else if (b.areAllValuesNull()) { - return writeAllFalseValidity(out, b.getPositionCount()); + if (b.areAllValuesNull()) { + return writeAllFalseValidity(out, valueCount); } else { - return writeValidities(out, b); + return writeValidities(out, b, valueCount); } }); } @@ -420,10 +481,10 @@ private static long writeAllFalseValidity(RecyclerBytesStreamOutput out, int val return count; } - private static long writeValidities(RecyclerBytesStreamOutput out, Block block) { - int valueCount = block.getPositionCount(); + private static long writeValidities(RecyclerBytesStreamOutput out, Block block, int valueCount) { BitSet bits = new BitSet(valueCount); for (int i = 0; i < block.getPositionCount(); i++) { + // isNull is value indices, not multi-value positions if (block.isNull(i) == false) { bits.set(i); } @@ -449,4 +510,29 @@ private static long writeZeroes(RecyclerBytesStreamOutput out, int byteCount) { } return byteCount; } + + private static void addListOffsets(List bufs, List bufWriters, Block 
block) { + // Add validity buffer + accumulateVectorValidity(bufs, bufWriters, block, false); + + // Add offsets buffer + int bufferLen = Integer.BYTES * (block.getPositionCount() + 1); + + bufs.add(dummyArrowBuf(bufferLen)); + bufWriters.add(out -> { + if (block.mayHaveMultivaluedFields()) { + // '<=' is intentional to write the end position of the last item + for (int i = 0; i <= block.getPositionCount(); i++) { + // TODO could we get the block's firstValueIndexes and dump it? + out.writeIntLE(block.getFirstValueIndex(i)); + } + } else { + for (int i = 0; i <= block.getPositionCount(); i++) { + out.writeIntLE(i); + } + } + + return bufferLen; + }); + } } diff --git a/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java index cf49b37db2805..b187e49554f8b 100644 --- a/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java +++ b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java @@ -19,7 +19,10 @@ import org.apache.arrow.vector.VarBinaryVector; import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.impl.UnionListWriter; import org.apache.arrow.vector.ipc.ArrowStreamReader; +import org.apache.arrow.vector.ipc.ArrowStreamWriter; import org.apache.arrow.vector.util.VectorSchemaRootAppender; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; @@ -34,7 +37,6 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVectorBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import 
org.elasticsearch.test.ESTestCase; @@ -42,6 +44,8 @@ import org.elasticsearch.xpack.versionfield.Version; import org.junit.AfterClass; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -75,6 +79,7 @@ public static void afterClass() throws Exception { // Value creation, getters for ESQL and Arrow static final ValueType INTEGER_VALUES = new ValueTypeImpl( + "integer", factory -> factory.newIntBlockBuilder(0), block -> block.appendInt(randomInt()), (block, i, scratch) -> block.getInt(i), @@ -82,6 +87,7 @@ public static void afterClass() throws Exception { ); static final ValueType LONG_VALUES = new ValueTypeImpl( + "long", factory -> factory.newLongBlockBuilder(0), block -> block.appendLong(randomLong()), (block, i, scratch) -> block.getLong(i), @@ -89,6 +95,7 @@ public static void afterClass() throws Exception { ); static final ValueType ULONG_VALUES = new ValueTypeImpl( + "ulong", factory -> factory.newLongBlockBuilder(0), block -> block.appendLong(randomLong()), (block, i, scratch) -> block.getLong(i), @@ -96,6 +103,7 @@ public static void afterClass() throws Exception { ); static final ValueType DATE_VALUES = new ValueTypeImpl( + "date", factory -> factory.newLongBlockBuilder(0), block -> block.appendLong(randomLong()), (block, i, scratch) -> block.getLong(i), @@ -103,6 +111,7 @@ public static void afterClass() throws Exception { ); static final ValueType DOUBLE_VALUES = new ValueTypeImpl( + "double", factory -> factory.newDoubleBlockBuilder(0), block -> block.appendDouble(randomDouble()), (block, i, scratch) -> block.getDouble(i), @@ -110,6 +119,7 @@ public static void afterClass() throws Exception { ); static final ValueType BOOLEAN_VALUES = new ValueTypeImpl( + "boolean", factory -> factory.newBooleanBlockBuilder(0), block -> block.appendBoolean(randomBoolean()), (b, i, s) -> b.getBoolean(i), @@ -117,21 +127,23 @@ public static void 
afterClass() throws Exception { ); static final ValueType TEXT_VALUES = new ValueTypeImpl( + "text", factory -> factory.newBytesRefBlockBuilder(0), - block -> block.appendBytesRef(new BytesRef("🚀" + randomAlphaOfLengthBetween(1, 20))), + block -> block.appendBytesRef(new BytesRef(randomUnicodeOfLengthBetween(1, 20))), (b, i, s) -> b.getBytesRef(i, s).utf8ToString(), (v, i) -> new String(v.get(i), StandardCharsets.UTF_8) ); static final ValueType SOURCE_VALUES = new ValueTypeImpl( + "source", factory -> factory.newBytesRefBlockBuilder(0), - // Use a constant value, conversion is tested separately - block -> block.appendBytesRef(new BytesRef("{\"foo\": 42}")), + block -> block.appendBytesRef(new BytesRef("{\"foo\": " + randomIntBetween(-42, 42) + "}")), (b, i, s) -> b.getBytesRef(i, s).utf8ToString(), (v, i) -> new String(v.get(i), StandardCharsets.UTF_8) ); static final ValueType IP_VALUES = new ValueTypeImpl( + "ip", factory -> factory.newBytesRefBlockBuilder(0), block -> { byte[] addr = InetAddressPoint.encode(randomIp(randomBoolean())); @@ -143,6 +155,7 @@ public static void afterClass() throws Exception { ); static final ValueType BINARY_VALUES = new ValueTypeImpl( + "binary", factory -> factory.newBytesRefBlockBuilder(0), block -> block.appendBytesRef(new BytesRef(randomByteArrayOfLength(randomIntBetween(1, 100)))), BytesRefBlock::getBytesRef, @@ -150,6 +163,7 @@ public static void afterClass() throws Exception { ); static final ValueType VERSION_VALUES = new ValueTypeImpl( + "version", factory -> factory.newBytesRefBlockBuilder(0), block -> block.appendBytesRef(new Version(between(0, 100) + "." + between(0, 100) + "." + between(0, 100)).toBytesRef()), (b, i, s) -> new Version(b.getBytesRef(i, s)).toString(), @@ -157,6 +171,7 @@ public static void afterClass() throws Exception { ); static final ValueType NULL_VALUES = new ValueTypeImpl( + "null", factory -> factory.newBytesRefBlockBuilder(0), Block.Builder::appendNull, (b, i, s) -> b.isNull(i) ? 
null : "non-null in block", @@ -201,9 +216,10 @@ public void testTestHarness() { TestBlock emptyBlock = TestBlock.create(BLOCK_FACTORY, testColumn, Density.Empty, 7); // Test that density works as expected - assertTrue(denseBlock.block instanceof IntVectorBlock); - assertEquals("IntArrayBlock", sparseBlock.block.getClass().getSimpleName()); // non-public class - assertEquals("ConstantNullBlock", emptyBlock.block.getClass().getSimpleName()); + assertFalse(denseBlock.block.mayHaveNulls()); + assertTrue(sparseBlock.block.mayHaveNulls()); + assertFalse(sparseBlock.block.areAllValuesNull()); + assertTrue(emptyBlock.block.areAllValuesNull()); // Test that values iterator scans all pages List pages = Stream.of(denseBlock, sparseBlock, emptyBlock).map(b -> new TestPage(List.of(b))).toList(); @@ -229,7 +245,7 @@ public void testTestHarness() { */ public void testSingleColumn() throws IOException { for (var type : VALUE_TYPES.keySet()) { - TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type)); + TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type), false); List pages = new ArrayList<>(); for (var density : Density.values()) { @@ -248,7 +264,7 @@ public void testSingleBlock() throws IOException { String type = "text"; Density density = Density.Dense; - TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type)); + TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type), false); List pages = new ArrayList<>(); TestBlock testBlock = TestBlock.create(BLOCK_FACTORY, testColumn, density, 10); @@ -261,44 +277,156 @@ public void testSingleBlock() throws IOException { } /** - * Test that multivalued arrays are rejected + * Test a multivalued field with fixed size values. 
*/ - public void testMultivaluedField() throws IOException { + public void testMultivaluedInteger() throws IOException { IntBlock.Builder builder = BLOCK_FACTORY.newIntBlockBuilder(0); + builder.beginPositionEntry(); builder.appendInt(42); + builder.appendInt(43); + builder.endPositionEntry(); + + // The multivalue can be null, but a multivalue cannot contain nulls. + // Calling appendNull within a begin/endEntry causes consistency checks to fail in build() + // See also https://github.com/elastic/elasticsearch/issues/114324 builder.appendNull(); + builder.beginPositionEntry(); builder.appendInt(44); builder.appendInt(45); builder.endPositionEntry(); + + // single value builder.appendInt(46); + IntBlock block = builder.build(); + builder.close(); - // Consistency check + // Consistency check. + // AbstractArrayBlock.assertInvariants does some of these consistency checks, but those below + // specifically verify the assumptions on which the conversion to Arrow is built. assertTrue(block.mayHaveMultivaluedFields()); + assertEquals(4, block.getPositionCount()); // counts null entries + assertEquals(5, block.getTotalValueCount()); // nulls aren't counted + + // Value 0 + assertEquals(2, block.getValueCount(0)); assertEquals(0, block.getFirstValueIndex(0)); - assertEquals(1, block.getValueCount(0)); + assertEquals(42, block.getInt(block.getFirstValueIndex(0))); + assertEquals(43, block.getInt(block.getFirstValueIndex(0) + 1)); - // null values still use one position in the array + // Value 1 assertEquals(0, block.getValueCount(1)); - assertEquals(1, block.getFirstValueIndex(1)); - assertTrue(block.isNull(1)); - assertEquals(0, block.getInt(1)); + assertTrue(block.isNull(1)); // This is the position index, not value index + // No value, but still occupies a value slot with zero + assertEquals(2, block.getFirstValueIndex(1)); + assertEquals(0, block.getInt(block.getFirstValueIndex(1))); + assertEquals(3, block.getFirstValueIndex(2)); - assertEquals(2, 
block.getFirstValueIndex(2)); + // Value 2 assertEquals(2, block.getValueCount(2)); - assertEquals(2, block.getFirstValueIndex(2)); + assertEquals(3, block.getFirstValueIndex(2)); + assertEquals(44, block.getInt(block.getFirstValueIndex(2))); assertEquals(45, block.getInt(block.getFirstValueIndex(2) + 1)); - assertEquals(4, block.getFirstValueIndex(3)); + // Value 3 + assertEquals(1, block.getValueCount(3)); + assertEquals(5, block.getFirstValueIndex(3)); + assertEquals(46, block.getInt(block.getFirstValueIndex(3))); - var column = TestColumn.create("some-field", "integer"); - TestCase testCase = new TestCase(List.of(column), List.of(new TestPage(List.of(new TestBlock(column, block, Density.Dense))))); + // End of block + assertEquals(6, block.getFirstValueIndex(4)); - IllegalArgumentException exc = assertThrows(IllegalArgumentException.class, () -> compareEsqlAndArrow(testCase)); + var column = TestColumn.create("some-field", "integer", true); + TestCase testCase = new TestCase(List.of(column), List.of(new TestPage(List.of(TestBlock.create(column, block))))); - assertEquals("ES|QL response field [some-field] is multi-valued. This isn't supported yet by the Arrow format", exc.getMessage()); + compareEsqlAndArrow(testCase); + } + + /** + * Test a multivalued field with variable size values. 
+ */ + public void testMultivalueString() throws IOException { + BytesRefBlock.Builder builder = BLOCK_FACTORY.newBytesRefBlockBuilder(0); + + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("a")); + builder.appendBytesRef(new BytesRef("b")); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("c")); + builder.appendBytesRef(new BytesRef("d")); + builder.endPositionEntry(); + + BytesRefBlock block = builder.build(); + builder.close(); + + var column = TestColumn.create("some-field", "text"); + TestCase testCase = new TestCase(List.of(column), List.of(new TestPage(List.of(TestBlock.create(column, block))))); + + compareEsqlAndArrow(testCase); + } + + // Test exercising Arrow's multivalue API + public void testMultiValueArrow() throws IOException { + + byte[] bytes; + + try (ListVector listVector = ListVector.empty("some-field", ALLOCATOR)) { + UnionListWriter writer = listVector.getWriter(); + + writer.startList(); + writer.writeInt(42); // 0x2A + writer.writeInt(43); // 0x2A + writer.endList(); + + writer.startList(); + // Size is zero without a writeNull() + writer.writeNull(); // Adds a null value in that list + writer.endList(); + + writer.startList(); + writer.writeInt(44); // 0x2C + writer.writeInt(45); // 0x2D + writer.endList(); + + writer.startList(); + writer.writeInt(46); // 0x2E + writer.endList(); + + listVector.setValueCount(4); + bytes = getBytes(listVector); + } + + try (var reader = new ArrowStreamReader(new ByteArrayInputStream(bytes), ALLOCATOR)) { + var root = reader.getVectorSchemaRoot(); + reader.loadNextBatch(); + + ListVector listVector = (ListVector) root.getVector("some-field"); + + assertEquals(4, listVector.getValueCount()); + assertEquals(List.of(42, 43), listVector.getObject(0)); + assertEquals(Collections.singletonList((Integer) null), listVector.getObject(1)); + assertEquals(List.of(44, 45), listVector.getObject(2)); + assertEquals(List.of(46), 
listVector.getObject(3)); + } + } + + private static byte[] getBytes(ListVector listVector) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + var fields = List.of(listVector.getField()); + List vectors = List.of(listVector); + + try ( + VectorSchemaRoot root = new VectorSchemaRoot(fields, vectors); + ArrowStreamWriter arrowWriter = new ArrowStreamWriter(root, null, baos); + ) { + arrowWriter.start(); + arrowWriter.writeBatch(); + arrowWriter.end(); + } + return baos.toByteArray(); } /** @@ -319,10 +447,6 @@ public void testRandomTypesAndSize() throws IOException { .toList(); TestCase testCase = new TestCase(columns, pages); - // System.out.println(testCase); - // for (TestPage page: pages) { - // System.out.println(page); - // } compareEsqlAndArrow(testCase); } @@ -347,8 +471,13 @@ private void compareEsqlAndArrow(TestCase testCase, VectorSchemaRoot root) { var esqlValuesIterator = new EsqlValuesIterator(testCase, i); var arrowValuesIterator = new ArrowValuesIterator(testCase, root, i); + int line = 0; + while (esqlValuesIterator.hasNext() && arrowValuesIterator.hasNext()) { - assertEquals(esqlValuesIterator.next(), arrowValuesIterator.next()); + Object esqlValue = esqlValuesIterator.next(); + Object arrowValue = arrowValuesIterator.next(); + assertEquals(("line " + line), esqlValue, arrowValue); + line++; } // Make sure we entirely consumed both sides. @@ -387,7 +516,6 @@ private VectorSchemaRoot toArrowVectors(TestCase testCase) throws IOException { static class EsqlValuesIterator implements Iterator { private final int fieldPos; private final ValueType type; - private final BytesRef scratch = new BytesRef(); private final Iterator pages; private TestPage page; @@ -412,7 +540,7 @@ public Object next() { throw new NoSuchElementException(); } Block block = page.blocks.get(fieldPos).block; - Object result = block.isNull(position) ? null : type.valueAt(block, position, scratch); + Object result = block.isNull(position) ? 
null : type.valueAt(block, position, new BytesRef()); position++; if (position >= block.getPositionCount()) { position = 0; @@ -475,9 +603,13 @@ public String toString() { } } - record TestColumn(String name, String type, ValueType valueType) { + record TestColumn(String name, String type, ValueType valueType, boolean multivalue) { static TestColumn create(String name, String type) { - return new TestColumn(name, type, VALUE_TYPES.get(type)); + return create(name, type, randomBoolean()); + } + + static TestColumn create(String name, String type, boolean multivalue) { + return new TestColumn(name, type, VALUE_TYPES.get(type), multivalue); } } @@ -498,6 +630,18 @@ public String toString() { record TestBlock(TestColumn column, Block block, Density density) { + static TestBlock create(TestColumn column, Block block) { + Density density; + if (block.areAllValuesNull()) { + density = Density.Empty; + } else if (block.mayHaveNulls()) { + density = Density.Sparse; + } else { + density = Density.Dense; + } + return new TestBlock(column, block, density); + } + static TestBlock create(BlockFactory factory, TestColumn column, int positions) { return create(factory, column, randomFrom(Density.values()), positions); } @@ -517,10 +661,21 @@ static TestBlock create(BlockFactory factory, TestColumn column, Density density start = 2; } for (int i = start; i < positions; i++) { - valueType.addValue(builder, density); + // If multivalued, randomly insert a series of values if the type isn't null (nulls are not allowed in multivalues) + if (column.multivalue && column.valueType != NULL_VALUES && randomBoolean()) { + builder.beginPositionEntry(); + int numEntries = randomIntBetween(2, 5); + for (int j = 0; j < numEntries; j++) { + valueType.addValue(builder, Density.Dense); + } + builder.endPositionEntry(); + } else { + valueType.addValue(builder, density); + } } // Will create an ArrayBlock if there are null values, VectorBlock otherwise block = builder.build(); + 
assertEquals(positions, block.getPositionCount()); } return new TestBlock(column, block, density); } @@ -553,17 +708,20 @@ interface ValueType { public static class ValueTypeImpl implements ValueType { + private final String name; private final Function builderCreator; private final Consumer valueAdder; private final TriFunction blockGetter; private final BiFunction vectorGetter; public ValueTypeImpl( + String name, Function builderCreator, Consumer valueAdder, TriFunction blockGetter, BiFunction vectorGetter ) { + this.name = name; this.builderCreator = builderCreator; this.valueAdder = valueAdder; this.blockGetter = blockGetter; @@ -588,13 +746,35 @@ public void addValue(Block.Builder builder, Density density) { @Override @SuppressWarnings("unchecked") public Object valueAt(Block block, int position, BytesRef scratch) { - return blockGetter.apply((BlockT) block, position, scratch); + // Build the list of values + var values = new ArrayList<>(); + for (int i = block.getFirstValueIndex(position); i < block.getFirstValueIndex(position + 1); i++) { + values.add(blockGetter.apply((BlockT) block, i, scratch)); + scratch = new BytesRef(); // do not overwrite previous value + } + return values.size() == 1 ? values.getFirst() : values; } @Override @SuppressWarnings("unchecked") public Object valueAt(ValueVector arrowVec, int position) { - return vectorGetter.apply((VectorT) arrowVec, position); + if (arrowVec instanceof ListVector listVector) { + var type = listVector.getField().getMetadata().get("elastic:type"); + // Build the list of values + var valueVec = listVector.getDataVector(); + var values = new ArrayList<>(); + for (int i = listVector.getElementStartIndex(position); i < listVector.getElementEndIndex(position); i++) { + values.add(vectorGetter.apply((VectorT) valueVec, i)); + } + return values.size() == 1 ? 
values.getFirst() : values; + } else { + return vectorGetter.apply((VectorT) arrowVec, position); + } + } + + @Override + public String toString() { + return name; } } } From ffcd62e32bd03a1ed52afb06c3510f5b76361683 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Mon, 21 Oct 2024 12:01:46 -0600 Subject: [PATCH 52/67] Fix test - times can be 0 sometimes (#115260) --- muted-tests.yml | 6 ------ .../test/cluster.stats/30_ccs_stats.yml | 16 ++++++++-------- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4f3ba742d16fa..482966e0f97f9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -256,9 +256,6 @@ tests: - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest_ErrorResponse issue: https://github.com/elastic/elasticsearch/issues/114327 -- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT - method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} - issue: https://github.com/elastic/elasticsearch/issues/114371 - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest issue: https://github.com/elastic/elasticsearch/issues/114385 @@ -311,9 +308,6 @@ tests: - class: org.elasticsearch.packaging.test.EnrollmentProcessTests method: test20DockerAutoFormCluster issue: https://github.com/elastic/elasticsearch/issues/114885 -- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT - method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} - issue: https://github.com/elastic/elasticsearch/issues/114902 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml index 955c68634e617..689c58dad31e6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/30_ccs_stats.yml @@ -121,10 +121,10 @@ - is_true: ccs._search.total - is_true: ccs._search.success - exists: ccs._search.skipped - - is_true: ccs._search.took - - is_true: ccs._search.took.max - - is_true: ccs._search.took.avg - - is_true: ccs._search.took.p90 + - exists: ccs._search.took + - exists: ccs._search.took.max + - exists: ccs._search.took.avg + - exists: ccs._search.took.p90 - is_true: ccs._search.took_mrt_true - exists: ccs._search.took_mrt_true.max - exists: ccs._search.took_mrt_true.avg @@ -145,7 +145,7 @@ - gte: {ccs._search.clusters.cluster_two.total: 1} - exists: ccs._search.clusters.cluster_one.skipped - exists: ccs._search.clusters.cluster_two.skipped - - is_true: ccs._search.clusters.cluster_one.took - - is_true: ccs._search.clusters.cluster_one.took.max - - is_true: ccs._search.clusters.cluster_one.took.avg - - is_true: ccs._search.clusters.cluster_one.took.p90 + - exists: ccs._search.clusters.cluster_one.took + - exists: ccs._search.clusters.cluster_one.took.max + - exists: ccs._search.clusters.cluster_one.took.avg + - exists: ccs._search.clusters.cluster_one.took.p90 From 7ceb4d85a942bcc0796033e1394ca9d4ebd430cf Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 21 Oct 2024 20:10:35 +0200 Subject: [PATCH 53/67] [ESQL] Make sure we built consistent OrdinalBytesRefBlock in BlockHashRandomizedTests (#115081) --- .../blockhash/BlockHashRandomizedTests.java | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 76d4caf810eb8..42ac4cf2ff917 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -475,17 +475,14 @@ public ElementType elementType() { @Override public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { - List> dictionary = new ArrayList<>(); + Map dictionary = new HashMap<>(); + Set keys = dictionary(maxValuesPerPosition); List> values = new ArrayList<>(positionCount); try ( IntBlock.Builder ordinals = TestBlockFactory.getNonBreakingInstance() .newIntBlockBuilder(positionCount * maxValuesPerPosition); BytesRefVector.Builder bytes = TestBlockFactory.getNonBreakingInstance().newBytesRefVectorBuilder(maxValuesPerPosition); ) { - for (String value : dictionary(maxValuesPerPosition)) { - bytes.appendBytesRef(new BytesRef(value)); - dictionary.add(Map.entry(value, dictionary.size())); - } for (int p = 0; p < positionCount; p++) { int valueCount = between(1, maxValuesPerPosition); int dupCount = between(0, dups); @@ -497,10 +494,14 @@ public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesP ordinals.beginPositionEntry(); } for (int v = 0; v < valueCount; v++) { - Map.Entry value = randomFrom(dictionary); - valuesAtPosition.add(new BytesRef(value.getKey())); - ordinals.appendInt(value.getValue()); - ordsAtPosition.add(value.getValue()); + String key = randomFrom(keys); + int ordinal = dictionary.computeIfAbsent(key, k -> { + bytes.appendBytesRef(new BytesRef(k)); + return dictionary.size(); + }); + valuesAtPosition.add(new BytesRef(key)); + ordinals.appendInt(ordinal); + ordsAtPosition.add(ordinal); } for (int v = 0; v < dupCount; v++) { 
ordinals.appendInt(randomFrom(ordsAtPosition)); From bc57bb02c1f5271d6f20159fafbea0fa7f01de02 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 21 Oct 2024 20:28:00 +0200 Subject: [PATCH 54/67] Always check the parent breaker with zero bytes in PreallocatedCircuitBreakerService (#115181) PreallocatedCircuitBreakerService will call the parent breaker if the nunber of bytes passed is zero. --- docs/changelog/115181.yaml | 5 +++++ .../common/breaker/PreallocatedCircuitBreakerService.java | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/115181.yaml diff --git a/docs/changelog/115181.yaml b/docs/changelog/115181.yaml new file mode 100644 index 0000000000000..65f59d5ed0add --- /dev/null +++ b/docs/changelog/115181.yaml @@ -0,0 +1,5 @@ +pr: 115181 +summary: Always check the parent breaker with zero bytes in `PreallocatedCircuitBreakerService` +area: Aggregations +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java index 9327dbe78077f..e5c9b14cf90fc 100644 --- a/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/common/breaker/PreallocatedCircuitBreakerService.java @@ -109,8 +109,8 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu if (closed) { throw new IllegalStateException("already closed"); } - if (preallocationUsed == preallocated) { - // Preallocation buffer was full before this request + if (preallocationUsed == preallocated || bytes == 0L) { + // Preallocation buffer was full before this request or we are checking the parent circuit breaker next.addEstimateBytesAndMaybeBreak(bytes, label); return; } From 79de53ae7b84547b00084327ce1ee38d5c7e6097 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Mon, 21 Oct 2024 15:13:21 -0400 
Subject: [PATCH 55/67] Unmute recently failing CCQ tests (#115218) --- muted-tests.yml | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 482966e0f97f9..971fc161c4632 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -271,9 +271,6 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test022InstallPluginsFromLocalArchive issue: https://github.com/elastic/elasticsearch/issues/111063 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/esql/esql-across-clusters/line_196} - issue: https://github.com/elastic/elasticsearch/issues/114488 - class: org.elasticsearch.gradle.internal.PublishPluginFuncTest issue: https://github.com/elastic/elasticsearch/issues/114492 - class: org.elasticsearch.xpack.inference.DefaultElserIT @@ -332,14 +329,6 @@ tests: - class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests method: testBlockLoaderFromRowStrideReaderWithSyntheticSource issue: https://github.com/elastic/elasticsearch/issues/115076 -- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT - method: test {string.ValuesGrouped} - issue: https://github.com/elastic/elasticsearch/issues/115126 -- class: org.elasticsearch.xpack.esql.action.CrossClustersQueryIT - method: testCCSExecutionOnSearchesWithLimit0 - issue: https://github.com/elastic/elasticsearch/issues/115129 -- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT - issue: https://github.com/elastic/elasticsearch/issues/115135 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 From 2ff6bb05431ea1525278dcc858e01ce30fb0e15c Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Mon, 21 Oct 2024 17:08:50 -0500 Subject: [PATCH 56/67] Adding support for additional mapping to simulate ingest API (#114742) --- docs/changelog/114742.yaml | 5 + 
.../ingest/apis/simulate-ingest.asciidoc | 13 + .../test/ingest/80_ingest_simulate.yml | 355 ++++++++++++++++++ .../bulk/TransportSimulateBulkActionIT.java | 54 ++- .../org/elasticsearch/TransportVersions.java | 1 + .../action/bulk/BulkFeatures.java | 4 +- .../action/bulk/SimulateBulkRequest.java | 77 ++-- .../bulk/TransportSimulateBulkAction.java | 172 +++++---- .../ingest/RestSimulateIngestAction.java | 17 +- .../action/bulk/SimulateBulkRequestTests.java | 94 ++++- .../TransportSimulateBulkActionTests.java | 8 +- .../ingest/SimulateIngestServiceTests.java | 6 +- 12 files changed, 675 insertions(+), 131 deletions(-) create mode 100644 docs/changelog/114742.yaml diff --git a/docs/changelog/114742.yaml b/docs/changelog/114742.yaml new file mode 100644 index 0000000000000..5bd3dad4400b8 --- /dev/null +++ b/docs/changelog/114742.yaml @@ -0,0 +1,5 @@ +pr: 114742 +summary: Adding support for additional mapping to simulate ingest API +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/reference/ingest/apis/simulate-ingest.asciidoc b/docs/reference/ingest/apis/simulate-ingest.asciidoc index 1bee03ea3e58a..da591eed7546f 100644 --- a/docs/reference/ingest/apis/simulate-ingest.asciidoc +++ b/docs/reference/ingest/apis/simulate-ingest.asciidoc @@ -108,6 +108,14 @@ POST /_ingest/_simulate "index_patterns": ["my-index-*"], "composed_of": ["component_template_1", "component_template_2"] } + }, + "mapping_addition": { <4> + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } } } ---- @@ -117,6 +125,7 @@ POST /_ingest/_simulate These templates can be used to change the pipeline(s) used, or to modify the mapping that will be used to validate the result. <3> This replaces the existing `my-index-template` index template with the contents given here for the duration of this request. These templates can be used to change the pipeline(s) used, or to modify the mapping that will be used to validate the result. 
+<4> This mapping is merged into the index's final mapping just before validation. It is used only for the duration of this request. [[simulate-ingest-api-request]] ==== {api-request-title} @@ -246,6 +255,10 @@ include::{es-ref-dir}/indices/put-index-template.asciidoc[tag=request-body] ==== +`mapping_addition`:: +(Optional, <>) +Definition of a mapping that will be merged into the index's mapping for validation during the course of this request. + [[simulate-ingest-api-example]] ==== {api-examples-title} diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 18eb401aaa0fe..d4aa2f1ad4467 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1216,3 +1216,358 @@ setup: - match: { docs.0.doc._source.foo: "FOO" } - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } - not_exists: docs.0.doc.error + +--- +"Test ingest simulate with mapping addition for data streams": + # In this test, we make sure that when the index template is a data stream template, simulate ingest works the same whether the data + # stream has been created or not -- either way, we expect it to use the template rather than the data stream / index mappings and settings. 
+ + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.mapping.addition"] + reason: "ingest simulate mapping addition added in 8.16" + + - do: + headers: + Content-Type: application/json + ingest.put_pipeline: + id: "foo-pipeline" + body: > + { + "processors": [ + { + "set": { + "field": "foo", + "value": true + } + } + ] + } + - match: { acknowledged: true } + + - do: + cluster.put_component_template: + name: mappings_template + body: + template: + mappings: + dynamic: strict + properties: + foo: + type: boolean + + - do: + cluster.put_component_template: + name: settings_template + body: + template: + settings: + index: + default_pipeline: "foo-pipeline" + + - do: + allowed_warnings: + - "index template [test-composable-1] has index patterns [foo*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation" + indices.put_index_template: + name: test-composable-1 + body: + index_patterns: + - foo* + composed_of: + - mappings_template + - settings_template + + - do: + allowed_warnings: + - "index template [my-template-1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template-1] will take precedence during new index creation" + indices.put_index_template: + name: my-template-1 + body: + index_patterns: [simple-data-stream1] + composed_of: + - mappings_template + - settings_template + data_stream: {} + + # Here we replace my-template-1 with a substitute version that uses the settings_template_2 and mappings_template_2 templates defined in + # this request, and foo-pipeline-2 defined in this request. 
+ - do: + headers: + Content-Type: application/json + simulate.ingest: + index: simple-data-stream1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "@timestamp": 1234, + "foo": false + } + } + ], + "pipeline_substitutions": { + "foo-pipeline-2": { + "processors": [ + { + "set": { + "field": "foo", + "value": "FOO" + } + } + ] + } + }, + "component_template_substitutions": { + "settings_template_2": { + "template": { + "settings": { + "index": { + "default_pipeline": "foo-pipeline-2" + } + } + } + }, + "mappings_template_2": { + "template": { + "mappings": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "integer" + } + } + } + } + } + }, + "index_template_substitutions": { + "my-template-1": { + "index_patterns": ["simple-data-stream1"], + "composed_of": ["settings_template_2", "mappings_template_2"], + "data_stream": {} + } + }, + "mapping_addition": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simple-data-stream1" } + - match: { docs.0.doc._source.foo: "FOO" } + - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } + - not_exists: docs.0.doc.error + + - do: + indices.create_data_stream: + name: simple-data-stream1 + - is_true: acknowledged + + - do: + cluster.health: + wait_for_status: yellow + + # Now that we have created a data stream, run the exact same simulate ingeset request to make sure we still get the same result, and that + # the substitutions and additions from the simulate ingest request are used instead of information from the data stream or its backing + # index. 
+ - do: + headers: + Content-Type: application/json + simulate.ingest: + index: simple-data-stream1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "@timestamp": 1234, + "foo": false + } + } + ], + "pipeline_substitutions": { + "foo-pipeline-2": { + "processors": [ + { + "set": { + "field": "foo", + "value": "FOO" + } + } + ] + } + }, + "component_template_substitutions": { + "settings_template_2": { + "template": { + "settings": { + "index": { + "default_pipeline": "foo-pipeline-2" + } + } + } + }, + "mappings_template_2": { + "template": { + "mappings": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "integer" + } + } + } + } + } + }, + "index_template_substitutions": { + "my-template-1": { + "index_patterns": ["simple-data-stream1"], + "composed_of": ["settings_template_2", "mappings_template_2"], + "data_stream": {} + } + }, + "mapping_addition": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simple-data-stream1" } + - match: { docs.0.doc._source.foo: "FOO" } + - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } + - not_exists: docs.0.doc.error + +--- +"Test mapping addition works with legacy templates": + # In this test, we make sure that when the index template is a data stream template, simulate ingest works the same whether the data + # stream has been created or not -- either way, we expect it to use the template rather than the data stream / index mappings and settings. 
+ + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.mapping.addition"] + reason: "ingest simulate mapping addition added in 8.16" + + - do: + indices.put_template: + name: my-legacy-template + body: + index_patterns: foo-* + settings: + number_of_replicas: 0 + mappings: + dynamic: strict + properties: + foo: + type: integer + bar: + type: boolean + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "not a boolean" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "not a boolean" } + - match: { docs.0.doc.error.type: "document_parsing_exception" } + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "not a boolean" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "not a boolean" } + - not_exists: docs.0.doc.error + + - do: + indices.create: + index: foo-1 + - match: { acknowledged: true } + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "not a boolean" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "not a boolean" } + - not_exists: docs.0.doc.error diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index cd17c5b345c59..d5d21c548a15d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -59,7 +60,7 @@ public void testMappingValidationIndexExists() { } """; indicesAdmin().create(new CreateIndexRequest(indexName).mapping(mapping)).actionGet(); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -131,10 +132,10 @@ public void testMappingValidationIndexExistsTemplateSubstitutions() throws IOExc String indexName = "my-index-1"; // First, run before the index is created: - assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName, indexTemplateName); + assertMappingsUpdatedFromSubstitutions(indexName, indexTemplateName); // Now, create the index and make sure the component template substitutions work the same: indicesAdmin().create(new CreateIndexRequest(indexName)).actionGet(); - assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName, indexTemplateName); + assertMappingsUpdatedFromSubstitutions(indexName, indexTemplateName); // Now make sure nothing was actually changed: indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); @@ -146,7 +147,7 @@ public void 
testMappingValidationIndexExistsTemplateSubstitutions() throws IOExc assertThat(fields.size(), equalTo(1)); } - private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String indexName, String indexTemplateName) { + private void assertMappingsUpdatedFromSubstitutions(String indexName, String indexTemplateName) { IndexRequest indexRequest1 = new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -159,7 +160,7 @@ private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String inde """, XContentType.JSON).id(randomUUID()); { // First we use the original component template, and expect a failure in the second document: - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(indexRequest1); bulkRequest.add(indexRequest2); BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet(); @@ -192,6 +193,7 @@ private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String inde ) ) ), + Map.of(), Map.of() ); bulkRequest.add(indexRequest1); @@ -226,7 +228,34 @@ private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String inde ) ) ), - Map.of(indexTemplateName, Map.of("index_patterns", List.of(indexName), "composed_of", List.of("test-component-template-2"))) + Map.of( + indexTemplateName, + Map.of("index_patterns", List.of(indexName), "composed_of", List.of("test-component-template-2")) + ), + Map.of() + ); + bulkRequest.add(indexRequest1); + bulkRequest.add(indexRequest2); + BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet(); + assertThat(response.getItems().length, equalTo(2)); + assertThat(response.getItems()[0].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED)); + assertNull(((SimulateIndexResponse) response.getItems()[0].getResponse()).getException()); + 
assertThat(response.getItems()[1].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED)); + assertNull(((SimulateIndexResponse) response.getItems()[1].getResponse()).getException()); + } + + { + /* + * Now we mapping_addition that defines both fields, so we expect no exception: + */ + BulkRequest bulkRequest = new SimulateBulkRequest( + Map.of(), + Map.of(), + Map.of(), + Map.of( + "_doc", + Map.of("dynamic", "strict", "properties", Map.of("foo1", Map.of("type", "text"), "foo3", Map.of("type", "text"))) + ) ); bulkRequest.add(indexRequest1); bulkRequest.add(indexRequest2); @@ -245,7 +274,7 @@ public void testMappingValidationIndexDoesNotExistsNoTemplate() { * mapping-less "random-index-template" created by the parent class), so we expect no mapping validation failure. */ String indexName = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -292,7 +321,7 @@ public void testMappingValidationIndexDoesNotExistsV2Template() throws IOExcepti request.indexTemplate(composableIndexTemplate); client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -324,7 +353,7 @@ public void testMappingValidationIndexDoesNotExistsV1Template() { indicesAdmin().putTemplate( new PutIndexTemplateRequest("test-template").patterns(List.of("my-index-*")).mapping("foo1", "type=integer") ).actionGet(); - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), 
Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -378,7 +407,7 @@ public void testMappingValidationIndexDoesNotExistsDataStream() throws IOExcepti client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); { // First, try with no @timestamp to make sure we're picking up data-stream-specific templates - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "foo1": "baz" @@ -389,7 +418,8 @@ public void testMappingValidationIndexDoesNotExistsDataStream() throws IOExcepti "foo3": "baz" } """, XContentType.JSON).id(randomUUID())); - BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet(); + BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest) + .actionGet(5, TimeUnit.SECONDS); assertThat(response.getItems().length, equalTo(2)); assertThat(response.getItems()[0].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED)); assertThat( @@ -404,7 +434,7 @@ public void testMappingValidationIndexDoesNotExistsDataStream() throws IOExcepti } { // Now with @timestamp - BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); bulkRequest.add(new IndexRequest(indexName).source(""" { "@timestamp": "2024-08-27", diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index cde09d33516c9..7e06004e47cfb 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -178,6 +178,7 @@ static TransportVersion def(int id) { public static final TransportVersion 
REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); + public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java index 78e603fba9be0..22cf8a2260d87 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java @@ -16,6 +16,7 @@ import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS; +import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_ADDITION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION_TEMPLATES; @@ -25,7 +26,8 @@ public Set getFeatures() { SIMULATE_MAPPING_VALIDATION, SIMULATE_MAPPING_VALIDATION_TEMPLATES, SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS, - SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS + SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS, + SIMULATE_MAPPING_ADDITION ); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java index 6fa22151396df..cc7fd431d8097 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java @@ -15,12 +15,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.IOException; -import java.util.HashMap; import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; /** * This extends BulkRequest with support for providing substitute pipeline definitions, component template definitions, and index template @@ -73,7 +73,8 @@ * } * } * } - * }, + * } + * }, * "index_template_substitutions": { * "my-index-template-1": { * "template": { @@ -84,6 +85,13 @@ * ] * } * } + * }, + * "mapping_addition": { + * "dynamic": "strict", + * "properties": { + * "foo": { + * "type": "keyword" + * } * } * * The pipelineSubstitutions Map held by this class is intended to be the result of XContentHelper.convertToMap(). The top-level keys @@ -94,6 +102,7 @@ public class SimulateBulkRequest extends BulkRequest { private final Map> pipelineSubstitutions; private final Map> componentTemplateSubstitutions; private final Map> indexTemplateSubstitutions; + private final Map mappingAddition; /** * @param pipelineSubstitutions The pipeline definitions that are to be used in place of any pre-existing pipeline definitions with @@ -103,16 +112,23 @@ public class SimulateBulkRequest extends BulkRequest { * component template definitions with the same name. * @param indexTemplateSubstitutions The index template definitions that are to be used in place of any pre-existing * index template definitions with the same name. 
+ * @param mappingAddition A mapping that will be merged into the final index's mapping for mapping validation */ public SimulateBulkRequest( - @Nullable Map> pipelineSubstitutions, - @Nullable Map> componentTemplateSubstitutions, - @Nullable Map> indexTemplateSubstitutions + Map> pipelineSubstitutions, + Map> componentTemplateSubstitutions, + Map> indexTemplateSubstitutions, + Map mappingAddition ) { super(); + Objects.requireNonNull(pipelineSubstitutions); + Objects.requireNonNull(componentTemplateSubstitutions); + Objects.requireNonNull(indexTemplateSubstitutions); + Objects.requireNonNull(mappingAddition); this.pipelineSubstitutions = pipelineSubstitutions; this.componentTemplateSubstitutions = componentTemplateSubstitutions; this.indexTemplateSubstitutions = indexTemplateSubstitutions; + this.mappingAddition = mappingAddition; } @SuppressWarnings("unchecked") @@ -129,6 +145,11 @@ public SimulateBulkRequest(StreamInput in) throws IOException { } else { indexTemplateSubstitutions = Map.of(); } + if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { + this.mappingAddition = (Map) in.readGenericValue(); + } else { + mappingAddition = Map.of(); + } } @Override @@ -141,6 +162,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS)) { out.writeGenericValue(indexTemplateSubstitutions); } + if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { + out.writeGenericValue(mappingAddition); + } } public Map> getPipelineSubstitutions() { @@ -153,41 +177,39 @@ public boolean isSimulated() { } @Override - public Map getComponentTemplateSubstitutions() throws IOException { - if (componentTemplateSubstitutions == null) { - return Map.of(); - } - Map result = new HashMap<>(componentTemplateSubstitutions.size()); - for (Map.Entry> rawEntry : componentTemplateSubstitutions.entrySet()) { - 
result.put(rawEntry.getKey(), convertRawTemplateToComponentTemplate(rawEntry.getValue())); - } - return result; + public Map getComponentTemplateSubstitutions() { + return componentTemplateSubstitutions.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> convertRawTemplateToComponentTemplate(entry.getValue()))); } @Override - public Map getIndexTemplateSubstitutions() throws IOException { - if (indexTemplateSubstitutions == null) { - return Map.of(); - } - Map result = new HashMap<>(indexTemplateSubstitutions.size()); - for (Map.Entry> rawEntry : indexTemplateSubstitutions.entrySet()) { - result.put(rawEntry.getKey(), convertRawTemplateToIndexTemplate(rawEntry.getValue())); - } - return result; + public Map getIndexTemplateSubstitutions() { + return indexTemplateSubstitutions.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> convertRawTemplateToIndexTemplate(entry.getValue()))); + } + + public Map getMappingAddition() { + return mappingAddition; } - private static ComponentTemplate convertRawTemplateToComponentTemplate(Map rawTemplate) throws IOException { + private static ComponentTemplate convertRawTemplateToComponentTemplate(Map rawTemplate) { ComponentTemplate componentTemplate; try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawTemplate)) { componentTemplate = ComponentTemplate.parse(parser); + } catch (IOException e) { + throw new RuntimeException(e); } return componentTemplate; } - private static ComposableIndexTemplate convertRawTemplateToIndexTemplate(Map rawTemplate) throws IOException { + private static ComposableIndexTemplate convertRawTemplateToIndexTemplate(Map rawTemplate) { ComposableIndexTemplate indexTemplate; try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawTemplate)) { indexTemplate = ComposableIndexTemplate.parse(parser); + } catch (IOException e) { + throw new RuntimeException(e); } return indexTemplate; } 
@@ -197,7 +219,8 @@ public BulkRequest shallowClone() { BulkRequest bulkRequest = new SimulateBulkRequest( pipelineSubstitutions, componentTemplateSubstitutions, - indexTemplateSubstitutions + indexTemplateSubstitutions, + mappingAddition ); bulkRequest.setRefreshPolicy(getRefreshPolicy()); bulkRequest.waitForActiveShards(waitForActiveShards()); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index d7c555879c00f..0888b70f5399c 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -26,10 +26,13 @@ import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexSettingProviders; @@ -37,6 +40,7 @@ import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; @@ -50,6 +54,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.HashMap; @@ -75,6 +83,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction { "simulate.component.template.substitutions" ); public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions"); + public static final NodeFeature SIMULATE_MAPPING_ADDITION = new NodeFeature("simulate.mapping.addition"); private final IndicesService indicesService; private final NamedXContentRegistry xContentRegistry; private final Set indexSettingProviders; @@ -122,11 +131,17 @@ protected void doInternalExecute( final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); Map componentTemplateSubstitutions = bulkRequest.getComponentTemplateSubstitutions(); Map indexTemplateSubstitutions = bulkRequest.getIndexTemplateSubstitutions(); + Map mappingAddition = ((SimulateBulkRequest) bulkRequest).getMappingAddition(); for (int i = 0; i < bulkRequest.requests.size(); i++) { DocWriteRequest docRequest = bulkRequest.requests.get(i); assert docRequest instanceof IndexRequest : "TransportSimulateBulkAction should only ever be called with IndexRequests"; IndexRequest request = (IndexRequest) docRequest; - Exception mappingValidationException = validateMappings(componentTemplateSubstitutions, indexTemplateSubstitutions, request); + Exception mappingValidationException = validateMappings( + componentTemplateSubstitutions, + indexTemplateSubstitutions, + mappingAddition, + request + ); responses.set( i, BulkItemResponse.success( @@ -159,6 +174,7 @@ protected void doInternalExecute( private Exception validateMappings( Map componentTemplateSubstitutions, Map indexTemplateSubstitutions, + Map 
mappingAddition, IndexRequest request ) { final SourceToParse sourceToParse = new SourceToParse( @@ -174,7 +190,10 @@ private Exception validateMappings( Exception mappingValidationException = null; IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(request.index()); try { - if (indexAbstraction != null && componentTemplateSubstitutions.isEmpty() && indexTemplateSubstitutions.isEmpty()) { + if (indexAbstraction != null + && componentTemplateSubstitutions.isEmpty() + && indexTemplateSubstitutions.isEmpty() + && mappingAddition.isEmpty()) { /* * In this case the index exists and we don't have any component template overrides. So we can just use withTempIndexService * to do the mapping validation, using all the existing logic for validation. @@ -250,36 +269,8 @@ private Exception validateMappings( indexSettingProviders ); CompressedXContent mappings = template.mappings(); - if (mappings != null) { - MappingMetadata mappingMetadata = new MappingMetadata(mappings); - Settings dummySettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - final IndexMetadata imd = IndexMetadata.builder(request.index()) - .settings(dummySettings) - .putMapping(mappingMetadata) - .build(); - indicesService.withTempIndexService(imd, indexService -> { - indexService.mapperService().updateMapping(null, imd); - return IndexShard.prepareIndex( - indexService.mapperService(), - sourceToParse, - SequenceNumbers.UNASSIGNED_SEQ_NO, - -1, - -1, - VersionType.INTERNAL, - Engine.Operation.Origin.PRIMARY, - Long.MIN_VALUE, - false, - request.ifSeqNo(), - request.ifPrimaryTerm(), - 0 - ); - }); - } + CompressedXContent mergedMappings = mergeMappings(mappings, mappingAddition); + validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse); } 
else { List matchingTemplates = findV1Templates(simulatedState.metadata(), request.index(), false); final Map mappingsMap = MetadataCreateIndexService.parseV1Mappings( @@ -287,40 +278,8 @@ private Exception validateMappings( matchingTemplates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), xContentRegistry ); - final CompressedXContent combinedMappings; - if (mappingsMap.isEmpty()) { - combinedMappings = null; - } else { - combinedMappings = new CompressedXContent(mappingsMap); - } - Settings dummySettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - MappingMetadata mappingMetadata = combinedMappings == null ? null : new MappingMetadata(combinedMappings); - final IndexMetadata imd = IndexMetadata.builder(request.index()) - .putMapping(mappingMetadata) - .settings(dummySettings) - .build(); - indicesService.withTempIndexService(imd, indexService -> { - indexService.mapperService().updateMapping(null, imd); - return IndexShard.prepareIndex( - indexService.mapperService(), - sourceToParse, - SequenceNumbers.UNASSIGNED_SEQ_NO, - -1, - -1, - VersionType.INTERNAL, - Engine.Operation.Origin.PRIMARY, - Long.MIN_VALUE, - false, - request.ifSeqNo(), - request.ifPrimaryTerm(), - 0 - ); - }); + final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); } } } catch (Exception e) { @@ -329,6 +288,66 @@ private Exception validateMappings( return mappingValidationException; } + /* + * Validates that when updatedMappings are applied + */ + private void validateUpdatedMappings( + @Nullable CompressedXContent originalMappings, + @Nullable CompressedXContent updatedMappings, + IndexRequest request, + SourceToParse 
sourceToParse + ) throws IOException { + if (updatedMappings == null) { + return; // no validation to do + } + Settings dummySettings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + IndexMetadata.Builder originalIndexMetadataBuilder = IndexMetadata.builder(request.index()).settings(dummySettings); + if (originalMappings != null) { + originalIndexMetadataBuilder.putMapping(new MappingMetadata(originalMappings)); + } + final IndexMetadata originalIndexMetadata = originalIndexMetadataBuilder.build(); + final IndexMetadata updatedIndexMetadata = IndexMetadata.builder(request.index()) + .settings(dummySettings) + .putMapping(new MappingMetadata(updatedMappings)) + .build(); + indicesService.withTempIndexService(originalIndexMetadata, indexService -> { + indexService.mapperService().merge(updatedIndexMetadata, MapperService.MergeReason.MAPPING_UPDATE); + return IndexShard.prepareIndex( + indexService.mapperService(), + sourceToParse, + SequenceNumbers.UNASSIGNED_SEQ_NO, + -1, + -1, + VersionType.INTERNAL, + Engine.Operation.Origin.PRIMARY, + Long.MIN_VALUE, + false, + request.ifSeqNo(), + request.ifPrimaryTerm(), + 0 + ); + }); + } + + private static CompressedXContent mergeMappings(@Nullable CompressedXContent originalMapping, Map mappingAddition) + throws IOException { + Map combinedMappingMap = new HashMap<>(); + if (originalMapping != null) { + combinedMappingMap.putAll(XContentHelper.convertToMap(originalMapping.uncompressed(), true, XContentType.JSON).v2()); + } + XContentHelper.update(combinedMappingMap, mappingAddition, true); + if (combinedMappingMap.isEmpty()) { + return null; + } else { + return convertMappingMapToXContent(combinedMappingMap); + } + } + /* * This overrides TransportSimulateBulkAction's getIngestService to allow us to 
provide an IngestService that handles pipeline * substitutions defined in the request. @@ -344,4 +363,25 @@ protected Boolean resolveFailureStore(String indexName, Metadata metadata, long // A simulate bulk request should not change any persistent state in the system, so we never write to the failure store return null; } + + private static CompressedXContent convertMappingMapToXContent(Map rawAdditionalMapping) throws IOException { + CompressedXContent compressedXContent; + if (rawAdditionalMapping == null || rawAdditionalMapping.isEmpty()) { + compressedXContent = null; + } else { + try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawAdditionalMapping)) { + compressedXContent = mappingFromXContent(parser); + } + } + return compressedXContent; + } + + private static CompressedXContent mappingFromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + if (token == XContentParser.Token.START_OBJECT) { + return new CompressedXContent(Strings.toString(XContentFactory.jsonBuilder().map(parser.mapOrdered()))); + } else { + throw new IllegalArgumentException("Unexpected token: " + token); + } + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java index 680860332fe74..c825a8198e6e4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java @@ -74,10 +74,21 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC String defaultPipeline = request.param("pipeline"); Tuple sourceTuple = request.contentOrSourceParam(); Map sourceMap = XContentHelper.convertToMap(sourceTuple.v2(), false, sourceTuple.v1()).v2(); + Map> pipelineSubstitutions = (Map>) sourceMap.remove( + "pipeline_substitutions" 
+ ); + Map> componentTemplateSubstitutions = (Map>) sourceMap.remove( + "component_template_substitutions" + ); + Map> indexTemplateSubstitutions = (Map>) sourceMap.remove( + "index_template_substitutions" + ); + Object mappingAddition = sourceMap.remove("mapping_addition"); SimulateBulkRequest bulkRequest = new SimulateBulkRequest( - (Map>) sourceMap.remove("pipeline_substitutions"), - (Map>) sourceMap.remove("component_template_substitutions"), - (Map>) sourceMap.remove("index_template_substitutions") + pipelineSubstitutions == null ? Map.of() : pipelineSubstitutions, + componentTemplateSubstitutions == null ? Map.of() : componentTemplateSubstitutions, + indexTemplateSubstitutions == null ? Map.of() : indexTemplateSubstitutions, + mappingAddition == null ? Map.of() : Map.of("_doc", mappingAddition) ); BytesReference transformedData = convertToBulkRequestXContentBytes(sourceMap); bulkRequest.add( diff --git a/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java index c94e4e46c9ee3..1e651791eb18a 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java @@ -22,32 +22,74 @@ import java.util.List; import java.util.Map; +import static java.util.Map.entry; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class SimulateBulkRequestTests extends ESTestCase { public void testSerialization() throws Exception { - testSerialization(getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), getTestIndexTemplateSubstitutions()); - testSerialization(getTestPipelineSubstitutions(), null, null); - testSerialization(getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), null); - testSerialization(getTestPipelineSubstitutions(), null, getTestIndexTemplateSubstitutions()); 
- testSerialization(null, getTestComponentTemplateSubstitutions(), getTestIndexTemplateSubstitutions()); - testSerialization(null, getTestComponentTemplateSubstitutions(), null); - testSerialization(null, null, getTestIndexTemplateSubstitutions()); - testSerialization(null, null, null); - testSerialization(Map.of(), Map.of(), Map.of()); + testSerialization( + getMapOrEmpty(getTestPipelineSubstitutions()), + getMapOrEmpty(getTestComponentTemplateSubstitutions()), + getMapOrEmpty(getTestIndexTemplateSubstitutions()), + getMapOrEmpty(getTestMappingAddition()) + ); + } + + private Map getMapOrEmpty(Map map) { + if (randomBoolean()) { + return map; + } else { + return Map.of(); + } + } + + public void testNullsNotAllowed() { + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest( + null, + getTestPipelineSubstitutions(), + getTestComponentTemplateSubstitutions(), + getTestMappingAddition() + ) + ); + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest( + getTestPipelineSubstitutions(), + null, + getTestComponentTemplateSubstitutions(), + getTestMappingAddition() + ) + ); + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest(getTestPipelineSubstitutions(), getTestPipelineSubstitutions(), null, getTestMappingAddition()) + ); + assertThrows( + NullPointerException.class, + () -> new SimulateBulkRequest( + getTestPipelineSubstitutions(), + getTestPipelineSubstitutions(), + getTestComponentTemplateSubstitutions(), + null + ) + ); } private void testSerialization( Map> pipelineSubstitutions, Map> componentTemplateSubstitutions, - Map> indexTemplateSubstitutions + Map> indexTemplateSubstitutions, + Map mappingAddition ) throws IOException { SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest( pipelineSubstitutions, componentTemplateSubstitutions, - indexTemplateSubstitutions + indexTemplateSubstitutions, + mappingAddition ); /* * Note: SimulateBulkRequest does not implement equals or hashCode, 
so we can't test serialization in the usual way for a @@ -59,7 +101,7 @@ private void testSerialization( @SuppressWarnings({ "unchecked", "rawtypes" }) public void testGetComponentTemplateSubstitutions() throws IOException { - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); assertThat(simulateBulkRequest.getComponentTemplateSubstitutions(), equalTo(Map.of())); String substituteComponentTemplatesString = """ { @@ -93,7 +135,7 @@ public void testGetComponentTemplateSubstitutions() throws IOException { XContentType.JSON ).v2(); Map> substituteComponentTemplates = (Map>) tempMap; - simulateBulkRequest = new SimulateBulkRequest(Map.of(), substituteComponentTemplates, Map.of()); + simulateBulkRequest = new SimulateBulkRequest(Map.of(), substituteComponentTemplates, Map.of(), Map.of()); Map componentTemplateSubstitutions = simulateBulkRequest.getComponentTemplateSubstitutions(); assertThat(componentTemplateSubstitutions.size(), equalTo(2)); assertThat( @@ -118,7 +160,7 @@ public void testGetComponentTemplateSubstitutions() throws IOException { } public void testGetIndexTemplateSubstitutions() throws IOException { - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of()); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); assertThat(simulateBulkRequest.getIndexTemplateSubstitutions(), equalTo(Map.of())); String substituteIndexTemplatesString = """ { @@ -154,7 +196,7 @@ public void testGetIndexTemplateSubstitutions() throws IOException { randomBoolean(), XContentType.JSON ).v2(); - simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), substituteIndexTemplates); + simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), substituteIndexTemplates, Map.of()); Map indexTemplateSubstitutions = 
simulateBulkRequest.getIndexTemplateSubstitutions(); assertThat(indexTemplateSubstitutions.size(), equalTo(2)); assertThat( @@ -179,7 +221,8 @@ public void testShallowClone() throws IOException { SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest( getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), - getTestIndexTemplateSubstitutions() + getTestIndexTemplateSubstitutions(), + getTestMappingAddition() ); simulateBulkRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); simulateBulkRequest.waitForActiveShards(randomIntBetween(1, 10)); @@ -204,7 +247,6 @@ public void testShallowClone() throws IOException { assertThat(shallowCopy.routing(), equalTo(simulateBulkRequest.routing())); assertThat(shallowCopy.requireAlias(), equalTo(simulateBulkRequest.requireAlias())); assertThat(shallowCopy.requireDataStream(), equalTo(simulateBulkRequest.requireDataStream())); - } private static Map> getTestPipelineSubstitutions() { @@ -248,4 +290,22 @@ private static Map> getTestIndexTemplateSubstitution Map.of("template", Map.of("index_patterns", List.of("foo*", "bar*"), "mappings", Map.of(), "settings", Map.of())) ); } + + private static Map getTestMappingAddition() { + return Map.ofEntries( + entry( + "_doc", + Map.ofEntries( + entry("dynamic", "strict"), + entry( + "properties", + Map.ofEntries( + entry("foo", Map.ofEntries(entry("type", "keyword"))), + entry("bar", Map.ofEntries(entry("type", "boolean"))) + ) + ) + ) + ) + ); + } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index 71bc31334920e..63d308e1579f3 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -135,7 +135,7 @@ public void tearDown() throws Exception { public void 
testIndexData() throws IOException { Task task = mock(Task.class); // unused - BulkRequest bulkRequest = new SimulateBulkRequest(null, null, null); + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); int bulkItemCount = randomIntBetween(0, 200); for (int i = 0; i < bulkItemCount; i++) { Map source = Map.of(randomAlphaOfLength(10), randomAlphaOfLength(5)); @@ -218,7 +218,11 @@ public void testIndexDataWithValidation() throws IOException { * (7) An indexing request to a nonexistent index that matches no templates */ Task task = mock(Task.class); // unused - BulkRequest bulkRequest = new SimulateBulkRequest(null, null, null); + /* + * Here we only add a mapping_addition because if there is no mapping at all TransportSimulateBulkAction skips mapping validation + * altogether, and we need it to run for this test to pass. + */ + BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of("_doc", Map.of("dynamic", "strict"))); int bulkItemCount = randomIntBetween(0, 200); Map indicesMap = new HashMap<>(); Map v1Templates = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java index 3b3f5bdc747b5..94b3607bd7608 100644 --- a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java @@ -65,7 +65,7 @@ public void testGetPipeline() { ingestService.innerUpdatePipelines(ingestMetadata); { // First we make sure that if there are no substitutions that we get our original pipeline back: - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(null, null, null); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of(), Map.of()); SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, 
simulateBulkRequest); Pipeline pipeline = simulateIngestService.getPipeline("pipeline1"); assertThat(pipeline.getProcessors(), contains(transformedMatch(Processor::getType, equalTo("processor1")))); @@ -83,7 +83,7 @@ public void testGetPipeline() { ); pipelineSubstitutions.put("pipeline2", newHashMap("processors", List.of(newHashMap("processor3", Collections.emptyMap())))); - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null, null); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, Map.of(), Map.of(), Map.of()); SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, simulateBulkRequest); Pipeline pipeline1 = simulateIngestService.getPipeline("pipeline1"); assertThat( @@ -103,7 +103,7 @@ public void testGetPipeline() { */ Map> pipelineSubstitutions = new HashMap<>(); pipelineSubstitutions.put("pipeline2", newHashMap("processors", List.of(newHashMap("processor3", Collections.emptyMap())))); - SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null, null); + SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, Map.of(), Map.of(), Map.of()); SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, simulateBulkRequest); Pipeline pipeline1 = simulateIngestService.getPipeline("pipeline1"); assertThat(pipeline1.getProcessors(), contains(transformedMatch(Processor::getType, equalTo("processor1")))); From f6c0a245fd15519990316f1e17e3e30ef0d31662 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 22 Oct 2024 11:20:22 +1100 Subject: [PATCH 57/67] [Test] Add client param indexExists (#115180) This allows us to use the admin client to easily check whether an index exists (that may not be visible to the standard client) --- .../java/org/elasticsearch/test/rest/ESRestTestCase.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index d17016f850300..22f93e6bda61f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1896,7 +1896,11 @@ protected Map getIndexMappingAsMap(String index) throws IOExcept } protected static boolean indexExists(String index) throws IOException { - Response response = client().performRequest(new Request("HEAD", "/" + index)); + return indexExists(client(), index); + } + + protected static boolean indexExists(RestClient client, String index) throws IOException { + Response response = client.performRequest(new Request("HEAD", "/" + index)); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } From aff4edd51bcb32c6478056087492c73890f8cf23 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 22 Oct 2024 07:21:18 +0200 Subject: [PATCH 58/67] Don't use a BytesStreamOutput to copy keys in BytesRefBlockHash (#114819) Removes he size limit on BytesStreamOutput when copying keys. 
--- docs/changelog/114819.yaml | 6 +++++ .../blockhash/BytesRefBlockHash.java | 20 +++++------------ .../aggregation/blockhash/X-BlockHash.java.st | 22 +++++-------------- 3 files changed, 16 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/114819.yaml diff --git a/docs/changelog/114819.yaml b/docs/changelog/114819.yaml new file mode 100644 index 0000000000000..f8d03f7024801 --- /dev/null +++ b/docs/changelog/114819.yaml @@ -0,0 +1,6 @@ +pr: 114819 +summary: Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` +area: EQL +type: bug +issues: + - 114599 diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index 3c5bf2c18c915..b8ea7658a8247 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -8,12 +8,9 @@ package org.elasticsearch.compute.aggregation.blockhash; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; @@ -30,8 +27,6 @@ import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeInt; import org.elasticsearch.core.ReleasableIterator; -import java.io.IOException; - /** * Maps a {@link BytesRefBlock} column to group ids. 
* This class is generated. Do not edit it. @@ -197,26 +192,21 @@ public BytesRefBlock[] getKeys() { * without and still read from the block. */ // TODO replace with takeBytesRefsOwnership ?! + final BytesRef spare = new BytesRef(); if (seenNull) { try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) { builder.appendNull(); - BytesRef spare = new BytesRef(); for (long i = 0; i < hash.size(); i++) { builder.appendBytesRef(hash.get(i, spare)); } return new BytesRefBlock[] { builder.build() }; } } - - final int size = Math.toIntExact(hash.size()); - try (BytesStreamOutput out = new BytesStreamOutput()) { - hash.getBytesRefs().writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - return new BytesRefBlock[] { - blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() }; + try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size()))) { + for (long i = 0; i < hash.size(); i++) { + builder.appendBytesRef(hash.get(i, spare)); } - } catch (IOException e) { - throw new IllegalStateException(e); + return new BytesRefBlock[] { builder.build() }; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st index 7c21cff56d7bb..2a3d1143236ac 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st @@ -9,15 +9,10 @@ package org.elasticsearch.compute.aggregation.blockhash; $if(BytesRef)$ import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; $endif$ import org.elasticsearch.common.unit.ByteSizeValue; 
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; -$if(BytesRef)$ -import org.elasticsearch.common.util.BytesRefArray; -$endif$ import org.elasticsearch.common.util.$Hash$; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; @@ -58,8 +53,6 @@ $endif$ import org.elasticsearch.core.ReleasableIterator; $if(BytesRef)$ -import java.io.IOException; - $else$ import java.util.BitSet; @@ -250,26 +243,21 @@ $if(BytesRef)$ * without and still read from the block. */ // TODO replace with takeBytesRefsOwnership ?! + final BytesRef spare = new BytesRef(); if (seenNull) { try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) { builder.appendNull(); - BytesRef spare = new BytesRef(); for (long i = 0; i < hash.size(); i++) { builder.appendBytesRef(hash.get(i, spare)); } return new BytesRefBlock[] { builder.build() }; } } - - final int size = Math.toIntExact(hash.size()); - try (BytesStreamOutput out = new BytesStreamOutput()) { - hash.getBytesRefs().writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - return new BytesRefBlock[] { - blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() }; + try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size()))) { + for (long i = 0; i < hash.size(); i++) { + builder.appendBytesRef(hash.get(i, spare)); } - } catch (IOException e) { - throw new IllegalStateException(e); + return new BytesRefBlock[] { builder.build() }; } $else$ if (seenNull) { From 013760cf4a408403044dc935122cf521d63dd92a Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 22 Oct 2024 07:37:28 +0200 Subject: [PATCH 59/67] Grow internal arrays when growing the capacity in AbstractHash implementations (#114907) This commit resizes those arrays when incrementing the capacity of the hashes to the maxSize. 
--- .../java/org/elasticsearch/common/util/BytesRefHash.java | 6 +++--- .../main/java/org/elasticsearch/common/util/LongHash.java | 4 ++-- .../java/org/elasticsearch/common/util/LongLongHash.java | 4 ++-- .../compute/aggregation/blockhash/BlockHashTests.java | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java index 208d29edad71d..288462ba3bbcb 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java @@ -48,7 +48,7 @@ public BytesRefHash(long capacity, float maxLoadFactor, BigArrays bigArrays) { boolean success = false; try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. - this.hashes = bigArrays.newIntArray(capacity, false); + this.hashes = bigArrays.newIntArray(maxSize, false); this.bytesRefs = new BytesRefArray(capacity, bigArrays); success = true; } finally { @@ -98,7 +98,7 @@ public BytesRefHash(BytesRefArray bytesRefs, float maxLoadFactor, BigArrays bigA boolean success = false; try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. 
- this.hashes = bigArrays.newIntArray(bytesRefs.size() + 1, false); + this.hashes = bigArrays.newIntArray(maxSize, false); this.bytesRefs = BytesRefArray.takeOwnershipOf(bytesRefs); success = true; } finally { @@ -182,7 +182,6 @@ private long set(BytesRef key, int code, long id) { private void append(long id, BytesRef key, int code) { assert size == id; bytesRefs.append(key); - hashes = bigArrays.grow(hashes, id + 1); hashes.set(id, code); } @@ -211,6 +210,7 @@ public long add(BytesRef key, int code) { if (size >= maxSize) { assert size == maxSize; grow(); + hashes = bigArrays.resize(hashes, maxSize); } assert size < maxSize; return set(key, rehash(code), size); diff --git a/server/src/main/java/org/elasticsearch/common/util/LongHash.java b/server/src/main/java/org/elasticsearch/common/util/LongHash.java index 0c681063c50b0..3eeb60e419a19 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongHash.java @@ -33,7 +33,7 @@ public LongHash(long capacity, float maxLoadFactor, BigArrays bigArrays) { super(capacity, maxLoadFactor, bigArrays); try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. 
- keys = bigArrays.newLongArray(capacity, false); + keys = bigArrays.newLongArray(maxSize, false); } finally { if (keys == null) { close(); @@ -78,7 +78,6 @@ private long set(long key, long id) { } private void append(long id, long key) { - keys = bigArrays.grow(keys, id + 1); keys.set(id, key); } @@ -102,6 +101,7 @@ public long add(long key) { if (size >= maxSize) { assert size == maxSize; grow(); + keys = bigArrays.resize(keys, maxSize); } assert size < maxSize; return set(key, size); diff --git a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java index f7708af59dde2..031794ed9c9c6 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java @@ -40,7 +40,7 @@ public LongLongHash(long capacity, float maxLoadFactor, BigArrays bigArrays) { super(capacity, maxLoadFactor, bigArrays); try { // `super` allocates a big array so we have to `close` if we fail here or we'll leak it. 
- keys = bigArrays.newLongArray(2 * capacity, false); + keys = bigArrays.newLongArray(2 * maxSize, false); } finally { if (keys == null) { close(); @@ -99,7 +99,6 @@ private long set(long key1, long key2, long id) { private void append(long id, long key1, long key2) { long keyOffset = 2 * id; - keys = bigArrays.grow(keys, keyOffset + 2); keys.set(keyOffset, key1); keys.set(keyOffset + 1, key2); } @@ -128,6 +127,7 @@ public long add(long key1, long key2) { if (size >= maxSize) { assert size == maxSize; grow(); + keys = bigArrays.resize(keys, maxSize * 2); } assert size < maxSize; return set(key1, key2, size); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index aeea18e52da0f..088e791348840 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -1147,7 +1147,7 @@ public void testLongBytesRefHashWithMultiValuedFields() { } else { assertThat( ordsAndKeys.description, - equalTo("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=9, size=491b}") + equalTo("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=9, size=483b}") ); assertOrds( ordsAndKeys.ords, From 185bf683787008495cc417a806675654ecb9e996 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 22 Oct 2024 08:58:21 +0200 Subject: [PATCH 60/67] Add prefilters only once in the compound and text similarity retrievers (#114983) This change ensures that the prefilters are propagated in the downstream retrievers only once. It also removes the ability to extends `explainQuery` in the compound retriever. This is not needed as the rank docs are now responsible for the explanation. 
--- .../retriever/CompoundRetrieverBuilder.java | 18 ++++++------------ .../TextSimilarityRankRetrieverBuilder.java | 19 +------------------ 2 files changed, 7 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index e994c55e43452..85dabf6eb6465 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportMultiSearchAction; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -163,6 +162,11 @@ public final QueryBuilder topDocsQuery() { throw new IllegalStateException("Should not be called, missing a rewrite?"); } + @Override + public final QueryBuilder explainQuery() { + throw new IllegalStateException("Should not be called, missing a rewrite?"); + } + @Override public final void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { throw new IllegalStateException("Should not be called, missing a rewrite?"); @@ -216,22 +220,12 @@ protected SearchSourceBuilder createSearchSourceBuilder(PointInTimeBuilder pit, .trackTotalHits(false) .storedFields(new StoredFieldsContext(false)) .size(rankWindowSize); + // apply the pre-filters downstream once if (preFilterQueryBuilders.isEmpty() == false) { retrieverBuilder.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); } retrieverBuilder.extractToSearchSourceBuilder(sourceBuilder, true); - // apply the pre-filters - if 
(preFilterQueryBuilders.size() > 0) { - QueryBuilder query = sourceBuilder.query(); - BoolQueryBuilder newQuery = new BoolQueryBuilder(); - if (query != null) { - newQuery.must(query); - } - preFilterQueryBuilders.forEach(newQuery::filter); - sourceBuilder.query(newQuery); - } - // Record the shard id in the sort result List> sortBuilders = sourceBuilder.sorts() != null ? new ArrayList<>(sourceBuilder.sorts()) : new ArrayList<>(); if (sortBuilders.isEmpty()) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 8bccf6e7d1022..342199dc51db8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -10,7 +10,6 @@ import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.common.ParsingException; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -20,7 +19,6 @@ import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverParserContext; -import org.elasticsearch.search.retriever.rankdoc.RankDocsQueryBuilder; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -158,33 +156,18 @@ protected RankDoc[] combineInnerRetrieverResults(List rankResults) { return textSimilarityRankDocs; } 
- @Override - public QueryBuilder explainQuery() { - // the original matching set of the TextSimilarityRank retriever is specified by its nested retriever - return new RankDocsQueryBuilder(rankDocs, new QueryBuilder[] { innerRetrievers.getFirst().retriever().explainQuery() }, true); - } - @Override protected SearchSourceBuilder createSearchSourceBuilder(PointInTimeBuilder pit, RetrieverBuilder retrieverBuilder) { var sourceBuilder = new SearchSourceBuilder().pointInTimeBuilder(pit) .trackTotalHits(false) .storedFields(new StoredFieldsContext(false)) .size(rankWindowSize); + // apply the pre-filters downstream once if (preFilterQueryBuilders.isEmpty() == false) { retrieverBuilder.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); } retrieverBuilder.extractToSearchSourceBuilder(sourceBuilder, true); - // apply the pre-filters - if (preFilterQueryBuilders.size() > 0) { - QueryBuilder query = sourceBuilder.query(); - BoolQueryBuilder newQuery = new BoolQueryBuilder(); - if (query != null) { - newQuery.must(query); - } - preFilterQueryBuilders.forEach(newQuery::filter); - sourceBuilder.query(newQuery); - } sourceBuilder.rankBuilder( new TextSimilarityRankBuilder(this.field, this.inferenceId, this.inferenceText, this.rankWindowSize, this.minScore) ); From e6e147e93b178391ca001913abac140e99cace78 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Tue, 22 Oct 2024 10:36:06 +0300 Subject: [PATCH 61/67] Adjust failure store to work with TSDS (#114307) In this PR we add a test and we fix the issues we encountered when we enabled the failure store for TSDS and logsdb. **Logsdb** Logsdb worked out of the box, so we just added the test that indexes with a bulk request a couple of documents and tests how they are ingested. **TSDS** Here it was a bit trickier. We encountered the following issues: - TSDS requires a timestamp to determine the write index of the data stream meaning the failure happens earlier than we have anticipated so far. 
We added a special exception to detect this case and we treat it accordingly. - The template of a TSDS data stream sets certain settings that we do not want to have in the failure store index. We added an allowlist that gets applied before we add the necessary index settings. Furthermore, we added a test case to capture this. --- .../datastreams/TSDBIndexingIT.java | 3 +- .../datastreams/DataStreamFeatures.java | 6 ++ .../test/data_stream/150_tsdb.yml | 101 ++++++++++++++++++ .../190_failure_store_redirection.yml | 2 +- .../CreateIndexClusterStateUpdateRequest.java | 12 +++ .../action/bulk/BulkOperation.java | 7 ++ .../cluster/metadata/DataStream.java | 24 ++++- .../DataStreamFailureStoreDefinition.java | 47 ++++++-- .../MetadataCreateDataStreamService.java | 3 +- .../metadata/MetadataCreateIndexService.java | 7 +- ...DataStreamFailureStoreDefinitionTests.java | 73 +++++++++++++ .../rest-api-spec/test/20_failure_store.yml | 99 +++++++++++++++++ 12 files changed, 369 insertions(+), 15 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java create mode 100644 x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index a2557a4de6e6d..29ec326548f2b 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; +import 
org.elasticsearch.action.bulk.IndexDocFailureStoreStatus; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -170,7 +171,7 @@ public void testTimeRanges() throws Exception { var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); time = randomBoolean() ? endTime : endTime.plusSeconds(randomIntBetween(1, 99)); indexRequest.source(DOC.replace("$time", formatInstant(time)), XContentType.JSON); - expectThrows(IllegalArgumentException.class, () -> client().index(indexRequest).actionGet()); + expectThrows(IndexDocFailureStoreStatus.ExceptionWithFailureStoreStatus.class, () -> client().index(indexRequest).actionGet()); } // Fetch UpdateTimeSeriesRangeService and increment time range of latest backing index: diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index ab7e590b1631e..f60a3e5c47a7f 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -26,6 +26,7 @@ public class DataStreamFeatures implements FeatureSpecification { public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); + public static final NodeFeature DATA_STREAM_FAILURE_STORE_TSDB_FIX = new NodeFeature("data_stream.failure_store.tsdb_fix"); @Override public Map getHistoricalFeatures() { @@ -41,4 +42,9 @@ public Set getFeatures() { DataStreamGlobalRetention.GLOBAL_RETENTION // Added in 8.14 ); } + + @Override + public Set getTestFeatures() { + return Set.of(DATA_STREAM_FAILURE_STORE_TSDB_FIX); + } } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml 
b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 56f387c016261..de5cf3baa744e 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -182,6 +182,107 @@ index without timestamp: body: - '{"metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' +--- +TSDB failures go to failure store: + - requires: + cluster_features: ["data_stream.failure_store.tsdb_fix"] + reason: "tests tsdb failure store fixes in 8.16.0 that catch timestamp errors that happen earlier in the process and redirect them to the failure store." + + - do: + allowed_warnings: + - "index template [my-template2] has index patterns [fs-k8s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template2] will take precedence during new index creation" + indices.put_index_template: + name: my-template2 + body: + index_patterns: [ "fs-k8s*" ] + data_stream: + failure_store: true + template: + settings: + index: + mode: time_series + number_of_replicas: 1 + number_of_shards: 2 + routing_path: [ metricset, time_series_dimension ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + ip: + type: ip + network: + properties: + tx: + type: long + rx: + type: long + - do: + index: + index: fs-k8s + body: + - '{"metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - match: { result : 
"created"} + - match: { failure_store : "used"} + + - do: + bulk: + refresh: true + body: + - '{ "create": { "_index": "fs-k8s"} }' + - '{"@timestamp":"2021-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "k8s"} }' + - '{ "@timestamp": "2021-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "fs-k8s"} }' + - '{ "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "fs-k8s"} }' + - '{ "@timestamp":"2000-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "k8s"} }' + - '{"metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{ "create": { "_index": "k8s"} }' + - '{ "@timestamp":"2000-04-28T01:00:00ZZ", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - is_true: errors + + # Successfully indexed to backing index + - match: { items.0.create._index: '/\.ds-fs-k8s-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.0.create.status: 201 } + - is_false: items.0.create.failure_store + - match: { items.1.create._index: '/\.ds-k8s-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.1.create.status: 201 } + - is_false: items.1.create.failure_store + + # Successfully indexed to failure store + - match: { 
items.2.create._index: '/\.fs-fs-k8s-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - match: { items.2.create.status: 201 } + - match: { items.2.create.failure_store: used } + - match: { items.3.create._index: '/\.fs-fs-k8s-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - match: { items.3.create.status: 201 } + - match: { items.3.create.failure_store: used } + + # Rejected, eligible to go to failure store, but failure store not enabled + - match: { items.4.create._index: 'k8s' } + - match: { items.4.create.status: 400 } + - match: { items.4.create.error.type: timestamp_error } + - match: { items.4.create.failure_store: not_enabled } + - match: { items.4.create._index: 'k8s' } + - match: { items.4.create.status: 400 } + - match: { items.4.create.error.type: timestamp_error } + - match: { items.4.create.failure_store: not_enabled } + --- index without timestamp with pipeline: - do: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index cb5578a282dc9..9b5a9dae8bc0a 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -879,7 +879,7 @@ teardown: # Successfully indexed to backing index - match: { items.0.create._index: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - match: { items.0.create.status: 201 } - - is_false: items.1.create.failure_store + - is_false: items.0.create.failure_store # Rejected but not eligible to go to failure store - match: { items.1.create._index: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java index 080ebb5951a7a..553f784d23a87 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java @@ -35,6 +35,7 @@ public class CreateIndexClusterStateUpdateRequest { private ResizeType resizeType; private boolean copySettings; private SystemDataStreamDescriptor systemDataStreamDescriptor; + private boolean isFailureIndex = false; private Settings settings = Settings.EMPTY; @@ -102,6 +103,11 @@ public CreateIndexClusterStateUpdateRequest systemDataStreamDescriptor(SystemDat return this; } + public CreateIndexClusterStateUpdateRequest isFailureIndex(boolean isFailureIndex) { + this.isFailureIndex = isFailureIndex; + return this; + } + public String cause() { return cause; } @@ -168,6 +174,10 @@ public String dataStreamName() { return dataStreamName; } + public boolean isFailureIndex() { + return isFailureIndex; + } + public CreateIndexClusterStateUpdateRequest dataStreamName(String dataStreamName) { this.dataStreamName = dataStreamName; return this; @@ -228,6 +238,8 @@ public String toString() { + systemDataStreamDescriptor + ", matchingTemplate=" + matchingTemplate + + ", isFailureIndex=" + + isFailureIndex + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 007f274d7f493..130d6286f7e02 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -320,6 +320,12 @@ private Map> groupRequestsByShards( shard -> new ArrayList<>() ); shardRequests.add(bulkItemRequest); + } catch (DataStream.TimestampError timestampError) { + IndexDocFailureStoreStatus failureStoreStatus = 
processFailure(bulkItemRequest, clusterState, timestampError); + if (IndexDocFailureStoreStatus.USED.equals(failureStoreStatus) == false) { + String name = ia != null ? ia.getName() : docWriteRequest.index(); + addFailureAndDiscardRequest(docWriteRequest, bulkItemRequest.id(), name, timestampError, failureStoreStatus); + } } catch (ElasticsearchParseException | IllegalArgumentException | RoutingMissingException | ResourceNotFoundException e) { String name = ia != null ? ia.getName() : docWriteRequest.index(); var failureStoreStatus = isFailureStoreRequest(docWriteRequest) @@ -545,6 +551,7 @@ private IndexDocFailureStoreStatus processFailure(BulkItemRequest bulkItemReques boolean added = addDocumentToRedirectRequests(bulkItemRequest, cause, failureStoreCandidate.getName()); if (added) { failureStoreMetrics.incrementFailureStore(bulkItemRequest.index(), errorType, FailureStoreMetrics.ErrorLocation.SHARD); + return IndexDocFailureStoreStatus.USED; } else { failureStoreMetrics.incrementRejected( bulkItemRequest.index(), diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index bedf65e1a9c8b..4dcc7c73c280e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -1343,7 +1343,7 @@ public Index getWriteIndex(IndexRequest request, Metadata metadata) { + "]" ) .collect(Collectors.joining()); - throw new IllegalArgumentException( + throw new TimestampError( "the document timestamp [" + timestampAsString + "] is outside of ranges of currently writable indices [" @@ -1405,10 +1405,10 @@ private static Instant getTimeStampFromRaw(Object rawTimestamp) { } else if (rawTimestamp instanceof String sTimestamp) { return DateFormatters.from(TIMESTAMP_FORMATTER.parse(sTimestamp), TIMESTAMP_FORMATTER.locale()).toInstant(); } else { - throw new 
IllegalArgumentException("timestamp [" + rawTimestamp + "] type [" + rawTimestamp.getClass() + "] error"); + throw new TimestampError("timestamp [" + rawTimestamp + "] type [" + rawTimestamp.getClass() + "] error"); } } catch (Exception e) { - throw new IllegalArgumentException("Error get data stream timestamp field: " + e.getMessage(), e); + throw new TimestampError("Error get data stream timestamp field: " + e.getMessage(), e); } } @@ -1432,7 +1432,7 @@ private static Instant getTimestampFromParser(BytesReference source, XContentTyp ); }; } catch (Exception e) { - throw new IllegalArgumentException("Error extracting data stream timestamp field: " + e.getMessage(), e); + throw new TimestampError("Error extracting data stream timestamp field: " + e.getMessage(), e); } } @@ -1741,4 +1741,20 @@ public DataStream build() { ); } } + + /** + * This is a specialised error to capture that a document does not have a valid timestamp + * to index a document. It is mainly applicable for TSDS data streams because they need the timestamp + * to determine the write index. 
+ */ + public static class TimestampError extends IllegalArgumentException { + + public TimestampError(String message, Exception cause) { + super(message, cause); + } + + public TimestampError(String message) { + super(message); + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java index fd3fc1a732acb..7315e9f7a51d3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -19,6 +20,8 @@ import org.elasticsearch.index.mapper.RoutingFieldMapper; import java.io.IOException; +import java.util.HashSet; +import java.util.Set; /** * A utility class that contains the mappings and settings logic for failure store indices that are a part of data streams. @@ -26,12 +29,30 @@ public class DataStreamFailureStoreDefinition { public static final String FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME = "data_streams.failure_store.refresh_interval"; + public static final String INDEX_FAILURE_STORE_VERSION_SETTING_NAME = "index.failure_store.version"; public static final Settings DATA_STREAM_FAILURE_STORE_SETTINGS; + // Only a subset of user configurable settings is applicable for a failure index. Here we have an + // allowlist that will filter all other settings out. 
+ public static final Set SUPPORTED_USER_SETTINGS = Set.of( + DataTier.TIER_PREFERENCE, + IndexMetadata.SETTING_INDEX_HIDDEN, + INDEX_FAILURE_STORE_VERSION_SETTING_NAME, + IndexMetadata.SETTING_NUMBER_OF_SHARDS, + IndexMetadata.SETTING_NUMBER_OF_REPLICAS, + IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), + IndexMetadata.LIFECYCLE_NAME + ); + public static final Set SUPPORTED_USER_SETTINGS_PREFIXES = Set.of( + IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".", + IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + ".", + IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "." + ); public static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; public static final int FAILURE_STORE_DEFINITION_VERSION = 1; public static final Setting FAILURE_STORE_DEFINITION_VERSION_SETTING = Setting.intSetting( - "index.failure_store.version", + INDEX_FAILURE_STORE_VERSION_SETTING_NAME, 0, Setting.Property.IndexScope ); @@ -40,11 +61,6 @@ public class DataStreamFailureStoreDefinition { DATA_STREAM_FAILURE_STORE_SETTINGS = Settings.builder() // Always start with the hidden settings for a backing index. .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) - // Override any pipeline settings on the failure store to not use any - // specified by the data stream template. Default pipelines are very much - // meant for the backing indices only. - .putNull(IndexSettings.DEFAULT_PIPELINE.getKey()) - .putNull(IndexSettings.FINAL_PIPELINE.getKey()) .put(FAILURE_STORE_DEFINITION_VERSION_SETTING.getKey(), FAILURE_STORE_DEFINITION_VERSION) .build(); @@ -199,4 +215,23 @@ public static Settings.Builder applyFailureStoreSettings(Settings nodeSettings, } return builder; } + + /** + * Removes the unsupported by the failure store settings from the settings provided. 
+ * ATTENTION: This method should be applied BEFORE we set the necessary settings for an index + * @param builder the settings builder that is going to be updated + * @return the original settings builder, with the unsupported settings removed. + */ + public static Settings.Builder filterUserDefinedSettings(Settings.Builder builder) { + if (builder.keys().isEmpty() == false) { + Set existingKeys = new HashSet<>(builder.keys()); + for (String setting : existingKeys) { + if (SUPPORTED_USER_SETTINGS.contains(setting) == false + && SUPPORTED_USER_SETTINGS_PREFIXES.stream().anyMatch(setting::startsWith) == false) { + builder.remove(setting); + } + } + } + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 2df9cf706d892..5dbf4da6f376f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -425,7 +425,8 @@ public static ClusterState createFailureStoreIndex( .nameResolvedInstant(nameResolvedInstant) .performReroute(false) .setMatchingTemplate(template) - .settings(indexSettings); + .settings(indexSettings) + .isFailureIndex(true); try { currentState = metadataCreateIndexService.applyCreateIndexRequest( diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 321719475c1f8..3accdd3881c6d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -983,6 +983,7 @@ static Settings aggregateIndexSettings( final Settings templateAndRequestSettings = 
Settings.builder().put(combinedTemplateSettings).put(request.settings()).build(); final IndexMode templateIndexMode = Optional.of(request) + .filter(r -> r.isFailureIndex() == false) .map(CreateIndexClusterStateUpdateRequest::matchingTemplate) .map(metadata::retrieveIndexModeFromTemplate) .orElse(null); @@ -1038,11 +1039,13 @@ static Settings aggregateIndexSettings( // Finally, we actually add the explicit defaults prior to the template settings and the // request settings, so that the precedence goes: - // Explicit Defaults -> Template -> Request -> Necessary Settings (# of shards, uuid, etc) + // Explicit Defaults -> Template -> Request -> Filter out failure store settings -> Necessary Settings (# of shards, uuid, etc) indexSettingsBuilder.put(additionalIndexSettings.build()); indexSettingsBuilder.put(templateSettings.build()); } - + if (request.isFailureIndex()) { + DataStreamFailureStoreDefinition.filterUserDefinedSettings(indexSettingsBuilder); + } // now, put the request settings, so they override templates indexSettingsBuilder.put(requestSettings.build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java new file mode 100644 index 0000000000000..38d4031755a55 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinitionTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.cluster.routing.allocation.DataTier; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.cluster.metadata.DataStreamFailureStoreDefinition.INDEX_FAILURE_STORE_VERSION_SETTING_NAME; +import static org.hamcrest.Matchers.equalTo; + +public class DataStreamFailureStoreDefinitionTests extends ESTestCase { + + public void testSettingsFiltering() { + // Empty + Settings.Builder builder = Settings.builder(); + Settings.Builder expectedBuilder = Settings.builder(); + assertThat(DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys(), equalTo(expectedBuilder.keys())); + + // All supported settings + builder.put(INDEX_FAILURE_STORE_VERSION_SETTING_NAME, 3) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(DataTier.TIER_PREFERENCE, "data_cold") + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-10") + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s") + .put(IndexMetadata.LIFECYCLE_NAME, "my-policy") + .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "." 
+ randomAlphaOfLength(4), randomAlphaOfLength(4)); + // We expect no changes + expectedBuilder = Settings.builder().put(builder.build()); + assertThat(DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys(), equalTo(expectedBuilder.keys())); + + // Remove unsupported settings + String randomSetting = randomAlphaOfLength(10); + builder.put(INDEX_FAILURE_STORE_VERSION_SETTING_NAME, 3) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(DataTier.TIER_PREFERENCE, "data_cold") + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-10") + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s") + .put(IndexMetadata.LIFECYCLE_NAME, "my-policy") + .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + "." + randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "." 
+ randomAlphaOfLength(4), randomAlphaOfLength(4)) + .put(IndexSettings.MODE.getKey(), randomFrom(IndexMode.values())) + .put(randomSetting, randomAlphaOfLength(10)); + // We expect no changes + expectedBuilder = Settings.builder().put(builder.build()); + assertThat( + DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys().size(), + equalTo(expectedBuilder.keys().size() - 2) + ); + assertThat( + DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys().contains(IndexSettings.MODE.getKey()), + equalTo(false) + ); + assertThat(DataStreamFailureStoreDefinition.filterUserDefinedSettings(builder).keys().contains(randomSetting), equalTo(false)); + } + +} diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml new file mode 100644 index 0000000000000..21e4f49fe7af5 --- /dev/null +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/20_failure_store.yml @@ -0,0 +1,99 @@ +--- +teardown: + - do: + indices.delete_data_stream: + name: my-logs-fs + ignore: 404 + + - do: + indices.delete_index_template: + name: template + ignore: 404 + + - do: + indices.delete_data_stream: + name: my-logs-db + ignore: 404 + - do: + indices.delete_index_template: + name: template1 + ignore: 404 + +--- +Test failure store with logsdb: + - requires: + test_runner_features: [ capabilities, allowed_warnings ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ logsdb_index_mode ] + - method: POST + path: /_bulk + capabilities: [ 'failure_store_status' ] + - method: PUT + path: /_bulk + capabilities: [ 'failure_store_status' ] + reason: "Support for 'logsdb' index mode & failure status capability required" + + - do: + allowed_warnings: + - "index template [my-template] has index patterns [my-logs-fs*] matching patterns from existing older templates [global] with patterns (global => [*]); this 
template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: ["my-logs-fs*"] + data_stream: + failure_store: true + template: + settings: + index: + mode: logsdb + number_of_replicas: 1 + number_of_shards: 2 + - do: + allowed_warnings: + - "index template [my-template2] has index patterns [my-logs-db*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template2] will take precedence during new index creation" + indices.put_index_template: + name: my-template2 + body: + index_patterns: [ "my-logs-db*" ] + data_stream: {} + template: + settings: + index: + mode: logsdb + number_of_replicas: 1 + number_of_shards: 2 + + - do: + bulk: + refresh: true + body: + - '{ "create": { "_index": "my-logs-fs"} }' + - '{"@timestamp":"2019-08-06T12:09:12.375Z", "log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", "service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer"}' + - '{ "create": { "_index": "my-logs-db"} }' + - '{ "@timestamp": "2022-01-01", "log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", "service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer" }' + - '{ "create": { "_index": "my-logs-fs"} }' + - '{"log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", "service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer"}' + - '{ "create": { "_index": "my-logs-db"} }' + - '{"log.level": "INFO", "message":"Tomcat started on port(s): 8080 (http) with context path ''", 
"service.name":"spring-petclinic","process.thread.name":"restartedMain","log.logger":"org.springframework.boot.web.embedded.tomcat.TomcatWebServer"}' + - is_true: errors + + # Successfully indexed to backing index + - match: { items.0.create._index: '/\.ds-my-logs-fs-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.0.create.status: 201 } + - is_false: items.0.create.failure_store + - match: { items.1.create._index: '/\.ds-my-logs-db-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.1.create.status: 201 } + - is_false: items.1.create.failure_store + + # Successfully indexed to failure store + - match: { items.2.create._index: '/\.fs-my-logs-fs-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - match: { items.2.create.status: 201 } + - match: { items.2.create.failure_store: used } + + # Rejected, eligible to go to failure store, but failure store not enabled + - match: { items.3.create._index: '/\.ds-my-logs-db-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.3.create.status: 400 } + - match: { items.3.create.error.type: document_parsing_exception } + - match: { items.3.create.failure_store: not_enabled } From 477f0cd68b5b9e02dcca0ff04d7f8d9b75c23bc2 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 22 Oct 2024 09:41:55 +0200 Subject: [PATCH 62/67] Use pattern in wipeAllIndices and unmute testUpgradeMovesRepoToNewMetaVersion (#115232) The inference index added to the delete index call doesn't exist in all 8.x versions. AFAICT, since this is not a pattern, the wipeAllIndices call fails since it is not able to find that index. Using a wildcard instead seems to resolve the issue. 
Closes #114994 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 971fc161c4632..d4accd399cace 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -308,9 +308,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 -- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT - method: testUpgradeMovesRepoToNewMetaVersion - issue: https://github.com/elastic/elasticsearch/issues/114994 - class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT method: testReadOnlyRepo issue: https://github.com/elastic/elasticsearch/issues/114997 From 05fc23a44005bea35035737687bc9758f27d768e Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 22 Oct 2024 10:50:39 +0200 Subject: [PATCH 63/67] Unmute MultiVersionRepositoryAccessIT.testReadOnlyRepo (#115215) I think the test failure opened is not correct, and relates to another failure. Relates https://github.com/elastic/elasticsearch/issues/114999 and https://github.com/elastic/elasticsearch/issues/114997. 
--- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d4accd399cace..1cb8baa96a942 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -308,9 +308,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 -- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT - method: testReadOnlyRepo - issue: https://github.com/elastic/elasticsearch/issues/114997 - class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT method: testCreateAndRestoreSnapshot issue: https://github.com/elastic/elasticsearch/issues/114998 From f32051f4629f6f1a4192a615ec0cb5e294089fb2 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Tue, 22 Oct 2024 11:09:19 +0200 Subject: [PATCH 64/67] fix: use setting instead of (#115193) --- docs/reference/mapping/types/binary.asciidoc | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/reference/mapping/types/binary.asciidoc b/docs/reference/mapping/types/binary.asciidoc index 5733a28eb711a..81ba44c954e0a 100644 --- a/docs/reference/mapping/types/binary.asciidoc +++ b/docs/reference/mapping/types/binary.asciidoc @@ -68,8 +68,16 @@ Synthetic source may sort `binary` values in order of their byte representation. 
---- PUT idx { + "settings": { + "index": { + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + }, "mappings": { - "_source": { "mode": "synthetic" }, "properties": { "binary": { "type": "binary", "doc_values": true } } From c7f53ff3b639555736e2b6d0864e537cacbc2d59 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 22 Oct 2024 12:30:22 +0100 Subject: [PATCH 65/67] [ML] Dynamically get of num allocations for ml node models (#115233) The GET inference API which should dynamically update the num_allocations field with the actual number from the deployed model which is useful when adaptive allocations are used --- .../inference/InferenceService.java | 4 + .../inference/CreateFromDeploymentIT.java | 63 ++++++++++++++++ .../xpack/inference/InferenceCrudIT.java | 6 ++ .../TransportGetInferenceModelAction.java | 73 +++++++++++++++---- .../ElasticsearchInternalModel.java | 6 +- .../ElasticsearchInternalService.java | 49 ++++++++++++- .../ElasticsearchInternalServiceSettings.java | 10 ++- .../ElserInternalModelTests.java | 30 ++++++++ 8 files changed, 219 insertions(+), 22 deletions(-) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index d437533a8603d..2c99563955746 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -210,4 +210,8 @@ default List defaultConfigIds() { default void defaultConfigs(ActionListener> defaultsListener) { defaultsListener.onResponse(List.of()); } + + default void updateModelsWithDynamicFields(List model, ActionListener> listener) { + listener.onResponse(model); + } } diff --git 
a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index f81ebc25dc860..0bfb6e9e43b03 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -109,6 +109,55 @@ public void testModelIdDoesNotMatch() throws IOException { ); } + public void testNumAllocationsIsUpdated() throws IOException { + var modelId = "update_num_allocations"; + var deploymentId = modelId; + + CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); + var response = startMlNodeDeploymemnt(modelId, deploymentId); + assertOkOrCreated(response); + + var inferenceId = "test_num_allocations_updated"; + var putModel = putModel(inferenceId, endpointConfig(deploymentId), TaskType.SPARSE_EMBEDDING); + var serviceSettings = putModel.get("service_settings"); + assertThat( + putModel.toString(), + serviceSettings, + is( + Map.of( + "num_allocations", + 1, + "num_threads", + 1, + "model_id", + "update_num_allocations", + "deployment_id", + "update_num_allocations" + ) + ) + ); + + assertOkOrCreated(updateMlNodeDeploymemnt(deploymentId, 2)); + + var updatedServiceSettings = getModel(inferenceId).get("service_settings"); + assertThat( + updatedServiceSettings.toString(), + updatedServiceSettings, + is( + Map.of( + "num_allocations", + 2, + "num_threads", + 1, + "model_id", + "update_num_allocations", + "deployment_id", + "update_num_allocations" + ) + ) + ); + } + private String endpointConfig(String deploymentId) { return Strings.format(""" { @@ -147,6 +196,20 @@ private Response startMlNodeDeploymemnt(String modelId, String deploymentId) thr 
return client().performRequest(request); } + private Response updateMlNodeDeploymemnt(String deploymentId, int numAllocations) throws IOException { + String endPoint = "/_ml/trained_models/" + deploymentId + "/deployment/_update"; + + var body = Strings.format(""" + { + "number_of_allocations": %d + } + """, numAllocations); + + Request request = new Request("POST", endPoint); + request.setJsonEntity(body); + return client().performRequest(request); + } + protected void stopMlNodeDeployment(String deploymentId) throws IOException { String endpoint = "/_ml/trained_models/" + deploymentId + "/deployment/_stop"; Request request = new Request("POST", endpoint); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index cbc50c361e3b5..37de2caadb475 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -24,6 +24,7 @@ import java.util.stream.Stream; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalToIgnoringCase; import static org.hamcrest.Matchers.hasSize; @@ -326,4 +327,9 @@ public void testSupportedStream() throws Exception { deleteModel(modelId); } } + + public void testGetZeroModels() throws IOException { + var models = getModels("_all", TaskType.RERANK); + assertThat(models, empty()); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index edcec45b50a16..01e663df4a3ea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -9,13 +9,13 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; @@ -29,8 +29,11 @@ import org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.concurrent.Executor; +import java.util.stream.Collectors; public class TransportGetInferenceModelAction extends HandledTransportAction< GetInferenceModelAction.Request, @@ -96,39 +99,77 @@ private void getSingleModel( var model = service.get() .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()); - delegate.onResponse(new GetInferenceModelAction.Response(List.of(model.getConfigurations()))); + + service.get() + .updateModelsWithDynamicFields( + List.of(model), + delegate.delegateFailureAndWrap( + (l2, updatedModels) -> l2.onResponse( + new 
GetInferenceModelAction.Response( + updatedModels.stream().map(Model::getConfigurations).collect(Collectors.toList()) + ) + ) + ) + ); })); } private void getAllModels(boolean persistDefaultEndpoints, ActionListener listener) { modelRegistry.getAllModels( persistDefaultEndpoints, - listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) + listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> parseModels(models, listener))) ); } private void getModelsByTaskType(TaskType taskType, ActionListener listener) { modelRegistry.getModelsByTaskType( taskType, - listener.delegateFailureAndWrap((l, models) -> executor.execute(ActionRunnable.supply(l, () -> parseModels(models)))) + listener.delegateFailureAndWrap((l, models) -> executor.execute(() -> parseModels(models, listener))) ); } - private GetInferenceModelAction.Response parseModels(List unparsedModels) { - var parsedModels = new ArrayList(); + private void parseModels(List unparsedModels, ActionListener listener) { + if (unparsedModels.isEmpty()) { + listener.onResponse(new GetInferenceModelAction.Response(List.of())); + return; + } - for (var unparsedModel : unparsedModels) { - var service = serviceRegistry.getService(unparsedModel.service()); - if (service.isEmpty()) { - throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); + var parsedModelsByService = new HashMap>(); + try { + for (var unparsedModel : unparsedModels) { + var service = serviceRegistry.getService(unparsedModel.service()); + if (service.isEmpty()) { + throw serviceNotFoundException(unparsedModel.service(), unparsedModel.inferenceEntityId()); + } + var list = parsedModelsByService.computeIfAbsent(service.get().name(), s -> new ArrayList<>()); + list.add( + service.get() + .parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) + ); } - parsedModels.add( - service.get() - 
.parsePersistedConfig(unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings()) - .getConfigurations() + + var groupedListener = new GroupedActionListener>( + parsedModelsByService.entrySet().size(), + listener.delegateFailureAndWrap((delegate, listOfListOfModels) -> { + var modifiable = new ArrayList(); + for (var l : listOfListOfModels) { + modifiable.addAll(l); + } + modifiable.sort(Comparator.comparing(Model::getInferenceEntityId)); + delegate.onResponse( + new GetInferenceModelAction.Response(modifiable.stream().map(Model::getConfigurations).collect(Collectors.toList())) + ); + }) ); + + for (var entry : parsedModelsByService.entrySet()) { + serviceRegistry.getService(entry.getKey()) + .get() // must be non-null to get this far + .updateModelsWithDynamicFields(entry.getValue(), groupedListener); + } + } catch (Exception e) { + listener.onFailure(e); } - return new GetInferenceModelAction.Response(parsedModels); } private ElasticsearchStatusException serviceNotFoundException(String service, String inferenceId) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index d38def8dca47f..8b2969c39b7ba 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -21,7 +21,7 @@ public abstract class ElasticsearchInternalModel extends Model { - protected final ElasticsearchInternalServiceSettings internalServiceSettings; + protected ElasticsearchInternalServiceSettings internalServiceSettings; public ElasticsearchInternalModel( String inferenceEntityId, @@ -91,6 +91,10 @@ public ElasticsearchInternalServiceSettings 
getServiceSettings() { return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); } + public void updateNumAllocations(Integer numAllocations) { + this.internalServiceSettings.setNumAllocations(numAllocations); + } + @Override public String toString() { return Strings.toString(this.getConfigurations()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 389a9fa369c21..49919fda9f89d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; @@ -56,6 +57,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -786,11 +788,50 @@ public List defaultConfigIds() { ); } - /** - * Default configurations that can be out of the box without creating an endpoint first. 
- * @param defaultsListener Config listener - */ @Override + public void updateModelsWithDynamicFields(List models, ActionListener> listener) { + + if (models.isEmpty()) { + listener.onResponse(models); + return; + } + + var modelsByDeploymentIds = new HashMap(); + for (var model : models) { + assert model instanceof ElasticsearchInternalModel; + + if (model instanceof ElasticsearchInternalModel esModel) { + modelsByDeploymentIds.put(esModel.mlNodeDeploymentId(), esModel); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot update model [{}] as it is not an Elasticsearch service model", + RestStatus.INTERNAL_SERVER_ERROR, + model.getInferenceEntityId() + ) + ); + return; + } + } + + String deploymentIds = String.join(",", modelsByDeploymentIds.keySet()); + client.execute( + GetDeploymentStatsAction.INSTANCE, + new GetDeploymentStatsAction.Request(deploymentIds), + ActionListener.wrap(stats -> { + for (var deploymentStats : stats.getStats().results()) { + var model = modelsByDeploymentIds.get(deploymentStats.getDeploymentId()); + model.updateNumAllocations(deploymentStats.getNumberOfAllocations()); + } + listener.onResponse(new ArrayList<>(modelsByDeploymentIds.values())); + }, e -> { + logger.warn("Get deployment stats failed, cannot update the endpoint's number of allocations", e); + // continue with the original response + listener.onResponse(models); + }) + ); + } + public void defaultConfigs(ActionListener> defaultsListener) { preferredModelVariantFn.accept(defaultsListener.delegateFailureAndWrap((delegate, preferredModelVariant) -> { if (PreferredModelVariant.LINUX_X86_OPTIMIZED.equals(preferredModelVariant)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index fedf48fb583a3..962c939146ef2 
100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -39,7 +39,7 @@ public class ElasticsearchInternalServiceSettings implements ServiceSettings { public static final String DEPLOYMENT_ID = "deployment_id"; public static final String ADAPTIVE_ALLOCATIONS = "adaptive_allocations"; - private final Integer numAllocations; + private Integer numAllocations; private final int numThreads; private final String modelId; private final AdaptiveAllocationsSettings adaptiveAllocationsSettings; @@ -172,6 +172,10 @@ public ElasticsearchInternalServiceSettings(StreamInput in) throws IOException { : null; } + public void setNumAllocations(Integer numAllocations) { + this.numAllocations = numAllocations; + } + @Override public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { @@ -194,6 +198,10 @@ public String modelId() { return modelId; } + public String deloymentId() { + return modelId; + } + public Integer getNumAllocations() { return numAllocations; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java new file mode 100644 index 0000000000000..96cd42efa42f5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; + +public class ElserInternalModelTests extends ESTestCase { + public void testUpdateNumAllocation() { + var model = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + ElasticsearchInternalService.NAME, + new ElserInternalServiceSettings(null, 1, "elser", null), + new ElserMlNodeTaskSettings(), + null + ); + + model.updateNumAllocations(1); + assertEquals(1, model.getServiceSettings().getNumAllocations().intValue()); + + model.updateNumAllocations(null); + assertNull(model.getServiceSettings().getNumAllocations()); + } +} From 332c9224f2f7c774dc470c65c20fd2836b9f3ee9 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Tue, 22 Oct 2024 09:09:21 -0400 Subject: [PATCH 66/67] onProcessFileChangesException (#115038) --- .../common/file/AbstractFileWatchingService.java | 16 ++++++++++++---- .../service/FileSettingsService.java | 12 +++++++++++- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java index a900722397edd..41998bf974bf9 100644 --- a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java @@ -313,12 +313,20 @@ void processSettingsOnServiceStartAndNotifyListeners() throws InterruptedExcepti void processSettingsAndNotifyListeners() throws InterruptedException { try { processFileChanges(); - for (var listener : eventListeners) { - listener.watchedFileChanged(); - } } catch (IOException | ExecutionException e) { - logger.error(() -> 
"Error processing watched file: " + watchedFile(), e); + onProcessFileChangesException(e); + return; } + for (var listener : eventListeners) { + listener.watchedFileChanged(); + } + } + + /** + * Called for checked exceptions only. + */ + protected void onProcessFileChangesException(Exception e) { + logger.error(() -> "Error processing watched file: " + watchedFile(), e); } // package private for testing diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index 811b59465ce76..601fc3c86d98f 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; @@ -144,7 +145,16 @@ private void processFileChanges(ReservedStateVersionCheck versionCheck) throws I } @Override - protected void processInitialFileMissing() throws ExecutionException, InterruptedException { + protected void onProcessFileChangesException(Exception e) { + if (e instanceof ExecutionException && e.getCause() instanceof FailedToCommitClusterStateException f) { + logger.error("Unable to commit cluster state", e); + } else { + super.onProcessFileChangesException(e); + } + } + + @Override + protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { PlainActionFuture completion = new PlainActionFuture<>(); logger.info("setting file [{}] not found, initializing [{}] as empty", 
watchedFile(), NAMESPACE); stateService.initEmpty(NAMESPACE, completion); From 003fbc73f6f4913135acf6ce4484e8e8ba032251 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 22 Oct 2024 16:26:30 +0300 Subject: [PATCH 67/67] Adding validation for incompatibility of compound retrievers and scroll (#115106) --- .../search/builder/SearchSourceBuilder.java | 2 +- .../retriever/CompoundRetrieverBuilder.java | 10 +++++++--- .../search/retriever/RetrieverBuilder.java | 1 + .../action/search/SearchRequestTests.java | 15 ++++++++++++++- 4 files changed, 23 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 6ceb02f0e797f..9c96319136007 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -2179,7 +2179,7 @@ public ActionRequestValidationException validate( boolean allowPartialSearchResults ) { if (retriever() != null) { - validationException = retriever().validate(this, validationException, allowPartialSearchResults); + validationException = retriever().validate(this, validationException, isScroll, allowPartialSearchResults); List specified = new ArrayList<>(); if (subSearches().isEmpty() == false) { specified.add(QUERY_FIELD.getPreferredName()); diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 85dabf6eb6465..7373bc5b75049 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -176,9 +176,10 @@ public final void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceB public ActionRequestValidationException 
validate( SearchSourceBuilder source, ActionRequestValidationException validationException, + boolean isScroll, boolean allowPartialSearchResults ) { - validationException = super.validate(source, validationException, allowPartialSearchResults); + validationException = super.validate(source, validationException, isScroll, allowPartialSearchResults); if (source.size() > rankWindowSize) { validationException = addValidationError( "[" @@ -194,12 +195,15 @@ public ActionRequestValidationException validate( } if (allowPartialSearchResults) { validationException = addValidationError( - "cannot specify a compound retriever and [allow_partial_search_results]", + "cannot specify [" + getName() + "] and [allow_partial_search_results]", validationException ); } + if (isScroll) { + validationException = addValidationError("cannot specify [" + getName() + "] and [scroll]", validationException); + } for (RetrieverSource innerRetriever : innerRetrievers) { - validationException = innerRetriever.retriever().validate(source, validationException, allowPartialSearchResults); + validationException = innerRetriever.retriever().validate(source, validationException, isScroll, allowPartialSearchResults); } return validationException; } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java index 882d44adb79c3..5e36ad0fd4fd6 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java @@ -239,6 +239,7 @@ public RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException { public ActionRequestValidationException validate( SearchSourceBuilder source, ActionRequestValidationException validationException, + boolean isScroll, boolean allowPartialSearchResults ) { return validationException; diff --git 
a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index c6ca97fd5694a..526961d74bf52 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -291,10 +291,23 @@ public void testValidate() throws IOException { assertNotNull(validationErrors); assertEquals(1, validationErrors.validationErrors().size()); assertEquals( - "cannot specify a compound retriever and [allow_partial_search_results]", + "cannot specify [test_compound_retriever_builder] and [allow_partial_search_results]", validationErrors.validationErrors().get(0) ); } + { + // scroll and compound retriever + SearchRequest searchRequest = createSearchRequest().source( + new SearchSourceBuilder().retriever(new TestCompoundRetrieverBuilder(randomIntBetween(1, 10))) + ); + searchRequest.allowPartialSearchResults(false); + searchRequest.scroll(TimeValue.timeValueMinutes(1)); + searchRequest.requestCache(false); + ActionRequestValidationException validationErrors = searchRequest.validate(); + assertNotNull(validationErrors); + assertEquals(1, validationErrors.validationErrors().size()); + assertEquals("cannot specify [test_compound_retriever_builder] and [scroll]", validationErrors.validationErrors().get(0)); + } { // allow_partial_results and non-compound retriever SearchRequest searchRequest = createSearchRequest().source(new SearchSourceBuilder().retriever(new RetrieverBuilder() {