diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 7ae9a812048fb..e6f27211525f5 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -65,6 +65,10 @@ public class AggregatorBenchmark { private static final int GROUPS = 5; private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); private static final String LONGS = "longs"; private static final String INTS = "ints"; @@ -446,7 +450,7 @@ private static Block dataBlock(BlockFactory blockFactory, String blockType) { BLOCK_LENGTH ).asBlock(); case MULTIVALUED_LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); builder.beginPositionEntry(); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i); @@ -459,7 +463,7 @@ private static Block dataBlock(BlockFactory blockFactory, String blockType) { yield builder.build(); } case HALF_NULL_LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i); builder.appendNull(); @@ -467,7 +471,7 @@ private static Block dataBlock(BlockFactory blockFactory, String blockType) { yield builder.build(); } case HALF_NULL_DOUBLES -> { - var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendDouble(i); builder.appendNull(); @@ -499,7 +503,7 @@ private static Block groupingBlock(String grouping, String blockType) { }; return switch (grouping) { case LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendLong(i % GROUPS); @@ -508,7 +512,7 @@ private static Block groupingBlock(String grouping, String blockType) { yield builder.build(); } case INTS -> { - var builder = IntBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newIntBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendInt(i % GROUPS); @@ -517,7 +521,7 @@ private static Block groupingBlock(String grouping, String blockType) { yield builder.build(); } case DOUBLES -> { - var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendDouble(i % GROUPS); @@ -526,7 +530,7 @@ private static Block groupingBlock(String grouping, String blockType) { yield builder.build(); } case BOOLEANS -> { - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(BLOCK_LENGTH); + BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendBoolean(i % 2 == 1); @@ -535,7 +539,7 @@ private static Block 
groupingBlock(String grouping, String blockType) { yield builder.build(); } case BYTES_REFS -> { - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(BLOCK_LENGTH); + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendBytesRef(bytesGroup(i % GROUPS)); @@ -582,9 +586,6 @@ private static void run(String grouping, String op, String blockType, int opCoun } static DriverContext driverContext() { - return new DriverContext( - BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE) - ); + return new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, blockFactory); } } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java index 33a39458ce8e3..e0281dbb856d4 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java @@ -9,6 +9,7 @@ package org.elasticsearch.benchmark.compute.operator; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.BytesRefArray; @@ -75,6 +76,7 @@ public class BlockBenchmark { public static final String[] RELEVANT_TYPE_BLOCK_COMBINATIONS = { "boolean/array", "boolean/array-multivalue-null", + "boolean/big-array", "boolean/big-array-multivalue-null", "boolean/vector", "boolean/vector-big-array", @@ -85,18 +87,21 @@ public class BlockBenchmark { "BytesRef/vector-const", "double/array", "double/array-multivalue-null", + "double/big-array", "double/big-array-multivalue-null", "double/vector", "double/vector-big-array", "double/vector-const", "int/array", "int/array-multivalue-null", + "int/big-array", "int/big-array-multivalue-null", "int/vector", "int/vector-big-array", "int/vector-const", "long/array", "long/array-multivalue-null", + "long/big-array", "long/big-array-multivalue-null", "long/vector", "long/vector-big-array", @@ -111,6 +116,11 @@ public class BlockBenchmark { private static final Random random = new Random(); + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + static { // Smoke test all the expected values and force loading subclasses more like prod int totalPositions = 10; @@ -131,7 +141,6 @@ public class BlockBenchmark { private record BenchmarkBlocks(Block[] blocks, long[] checkSums) {}; private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, int totalPositions) { - BlockFactory blockFactory = BlockFactory.getNonBreakingInstance(); Block[] blocks = new Block[NUM_BLOCKS_PER_ITERATION]; long[] checkSums = new long[NUM_BLOCKS_PER_ITERATION]; @@ -172,6 +181,23 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in Block.MvOrdering.UNORDERED ); } + case "big-array" -> { + BitArray valuesBigArray = new BitArray(totalPositions, BigArrays.NON_RECYCLING_INSTANCE); + for (int i = 0; i < values.length; i++) { + if (values[i]) { + valuesBigArray.set(i); + } + } + + blocks[blockIndex] = new BooleanBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + 
Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } case "big-array-multivalue-null" -> { int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); int positionCount = firstValueIndexes.length - 1; @@ -189,7 +215,7 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in firstValueIndexes, nulls, Block.MvOrdering.UNORDERED, - BlockFactory.getNonBreakingInstance() + blockFactory ); } case "vector" -> { @@ -310,13 +336,26 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in Block.MvOrdering.UNORDERED ); } + case "big-array" -> { + DoubleArray valuesBigArray = blockFactory.bigArrays().newDoubleArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new DoubleBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } case "big-array-multivalue-null" -> { int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); int positionCount = firstValueIndexes.length - 1; BitSet nulls = randomNulls(positionCount); - DoubleArray valuesBigArray = BlockFactory.getNonBreakingInstance() - .bigArrays() - .newDoubleArray(totalPositions, false); + DoubleArray valuesBigArray = blockFactory.bigArrays().newDoubleArray(totalPositions, false); for (int i = 0; i < values.length; i++) { valuesBigArray.set(i, values[i]); } @@ -327,7 +366,7 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in firstValueIndexes, nulls, Block.MvOrdering.UNORDERED, - BlockFactory.getNonBreakingInstance() + blockFactory ); } case "vector" -> { @@ -335,9 +374,7 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in blocks[blockIndex] = vector.asBlock(); } case "vector-big-array" -> { - DoubleArray valuesBigArray = BlockFactory.getNonBreakingInstance() - .bigArrays() - .newDoubleArray(totalPositions, false); + DoubleArray valuesBigArray = blockFactory.bigArrays().newDoubleArray(totalPositions, false); for (int i = 0; i < values.length; i++) { valuesBigArray.set(i, values[i]); } @@ -391,11 +428,26 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in Block.MvOrdering.UNORDERED ); } + case "big-array" -> { + IntArray valuesBigArray = blockFactory.bigArrays().newIntArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new IntBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } case "big-array-multivalue-null" -> { int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); int positionCount = firstValueIndexes.length - 1; BitSet nulls = randomNulls(positionCount); - IntArray valuesBigArray = BlockFactory.getNonBreakingInstance().bigArrays().newIntArray(totalPositions, false); + IntArray valuesBigArray = blockFactory.bigArrays().newIntArray(totalPositions, false); for (int i = 0; i < values.length; i++) { valuesBigArray.set(i, values[i]); } @@ -406,7 +458,7 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in firstValueIndexes, nulls, Block.MvOrdering.UNORDERED, - BlockFactory.getNonBreakingInstance() + blockFactory ); } case "vector" -> { @@ -414,7 +466,7 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in blocks[blockIndex] = vector.asBlock(); } case 
"vector-big-array" -> { - IntArray valuesBigArray = BlockFactory.getNonBreakingInstance().bigArrays().newIntArray(totalPositions, false); + IntArray valuesBigArray = blockFactory.bigArrays().newIntArray(totalPositions, false); for (int i = 0; i < values.length; i++) { valuesBigArray.set(i, values[i]); } @@ -468,13 +520,26 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in Block.MvOrdering.UNORDERED ); } + case "big-array" -> { + LongArray valuesBigArray = blockFactory.bigArrays().newLongArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new LongBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } case "big-array-multivalue-null" -> { int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); int positionCount = firstValueIndexes.length - 1; BitSet nulls = randomNulls(positionCount); - LongArray valuesBigArray = BlockFactory.getNonBreakingInstance() - .bigArrays() - .newLongArray(totalPositions, false); + LongArray valuesBigArray = blockFactory.bigArrays().newLongArray(totalPositions, false); for (int i = 0; i < values.length; i++) { valuesBigArray.set(i, values[i]); } @@ -485,7 +550,7 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in firstValueIndexes, nulls, Block.MvOrdering.UNORDERED, - BlockFactory.getNonBreakingInstance() + blockFactory ); } case "vector" -> { @@ -493,9 +558,7 @@ private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, in blocks[blockIndex] = vector.asBlock(); } case "vector-big-array" -> { - LongArray valuesBigArray = BlockFactory.getNonBreakingInstance() - .bigArrays() - .newLongArray(totalPositions, false); + LongArray valuesBigArray = blockFactory.bigArrays().newLongArray(totalPositions, false); for (int i = 0; i < values.length; i++) { valuesBigArray.set(i, values[i]); } @@ -718,6 +781,7 @@ private static boolean isRandom(String accessType) { { "boolean/array", "boolean/array-multivalue-null", + "boolean/big-array", "boolean/big-array-multivalue-null", "boolean/vector", "boolean/vector-big-array", @@ -728,18 +792,21 @@ private static boolean isRandom(String accessType) { "BytesRef/vector-const", "double/array", "double/array-multivalue-null", + "double/big-array", "double/big-array-multivalue-null", "double/vector", "double/vector-big-array", "double/vector-const", "int/array", "int/array-multivalue-null", + "int/big-array", "int/big-array-multivalue-null", "int/vector", "int/vector-big-array", "int/vector-const", "long/array", "long/array-multivalue-null", + "long/big-array", "long/big-array-multivalue-null", "long/vector", "long/vector-big-array", diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 3a1142ad87d2f..1765897ba35e7 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import 
org.elasticsearch.compute.data.Page; @@ -59,6 +58,12 @@ @State(Scope.Thread) @Fork(1) public class EvalBenchmark { + private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + private static final int BLOCK_LENGTH = 8 * 1024; static final DriverContext driverContext = new DriverContext( @@ -207,15 +212,15 @@ private static void checkExpected(String operation, Page actual) { private static Page page(String operation) { return switch (operation) { case "abs", "add", "date_trunc", "equal_to_const" -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i * 100_000); } yield new Page(builder.build()); } case "long_equal_to_long" -> { - var lhs = LongBlock.newBlockBuilder(BLOCK_LENGTH); - var rhs = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); + var rhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { lhs.appendLong(i * 100_000); rhs.appendLong(i * 100_000); @@ -223,8 +228,8 @@ private static Page page(String operation) { yield new Page(lhs.build(), rhs.build()); } case "long_equal_to_int" -> { - var lhs = LongBlock.newBlockBuilder(BLOCK_LENGTH); - var rhs = IntBlock.newBlockBuilder(BLOCK_LENGTH); + var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); + var rhs = blockFactory.newIntBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { lhs.appendLong(i * 100_000); rhs.appendInt(i * 100_000); @@ -232,7 +237,7 @@ private static Page page(String operation) { yield new Page(lhs.build(), rhs.build()); } case "mv_min", "mv_min_ascending" -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); if (operation.endsWith("ascending")) { builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java index 09cdc8b269ad3..c32aa1184ddaa 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -43,6 +45,12 @@ @State(Scope.Thread) @Fork(1) public class MultivalueDedupeBenchmark { + private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? 
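Reviewer note on the benchmark refactor running through the hunks above and below: each benchmark now allocates one shared, non-breaking BlockFactory and routes all block construction through it, instead of the static LongBlock.newBlockBuilder/IntBlock.newBlockBuilder entry points. A minimal sketch of the pattern, using only the factory calls visible in this diff (the class name and block length are invented for illustration):

```java
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.LongBlock;

public class BlockFactorySketch {
    // One factory per benchmark: the no-op breaker accounts for allocations
    // through the factory API but never trips, so benchmark timings are not
    // polluted by circuit-breaker bookkeeping failures.
    private static final BlockFactory blockFactory = BlockFactory.getInstance(
        new NoopCircuitBreaker("noop"),
        BigArrays.NON_RECYCLING_INSTANCE
    );

    public static void main(String[] args) {
        // Builders come from the factory rather than LongBlock.newBlockBuilder(...),
        // so every block is tied to the factory that tracks its memory.
        var builder = blockFactory.newLongBlockBuilder(1024);
        for (int i = 0; i < 1024; i++) {
            builder.appendLong(i);
        }
        LongBlock block = builder.build();
        System.out.println(block.getPositionCount());
    }
}
```

The same substitution repeats for the int, double, boolean and BytesRef builders in the remaining benchmark hunks.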
+ private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + @Param({ "BOOLEAN", "BYTES_REF", "DOUBLE", "INT", "LONG" }) private ElementType elementType; @@ -58,7 +66,7 @@ public class MultivalueDedupeBenchmark { public void setup() { this.block = switch (elementType) { case BOOLEAN -> { - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -77,7 +85,7 @@ public void setup() { yield builder.build(); } case BYTES_REF -> { - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -96,7 +104,7 @@ public void setup() { yield builder.build(); } case DOUBLE -> { - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -115,7 +123,7 @@ public void setup() { yield builder.build(); } case INT -> { - IntBlock.Builder builder = IntBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + IntBlock.Builder builder = blockFactory.newIntBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -134,7 +142,7 @@ public void setup() { yield builder.build(); } case LONG -> { - LongBlock.Builder builder = LongBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + LongBlock.Builder builder = blockFactory.newLongBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List values = new ArrayList<>(); for (long i = 0; i < size; i++) { @@ -159,18 +167,18 @@ public void setup() { @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) public void adaptive() { - MultivalueDedupe.dedupeToBlockAdaptive(block, BlockFactory.getNonBreakingInstance()).close(); + MultivalueDedupe.dedupeToBlockAdaptive(block, blockFactory).close(); } @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) public void copyAndSort() { - MultivalueDedupe.dedupeToBlockUsingCopyAndSort(block, BlockFactory.getNonBreakingInstance()).close(); + MultivalueDedupe.dedupeToBlockUsingCopyAndSort(block, blockFactory).close(); } @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) public void copyMissing() { - MultivalueDedupe.dedupeToBlockUsingCopyMissing(block, BlockFactory.getNonBreakingInstance()).close(); + MultivalueDedupe.dedupeToBlockUsingCopyMissing(block, blockFactory).close(); } } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java index d723ea3e1a6b3..3d5a36ea288b4 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java @@ -10,16 +10,15 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.topn.TopNEncoder; @@ -51,6 +50,12 @@ @State(Scope.Thread) @Fork(1) public class TopNBenchmark { + private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + private static final int BLOCK_LENGTH = 8 * 1024; private static final String LONGS = "longs"; @@ -110,7 +115,7 @@ private static Operator operator(String data, int topCount) { ClusterSettings.createBuiltInClusterSettings() ); return new TopNOperator( - BlockFactory.getNonBreakingInstance(), + blockFactory, breakerService.getBreaker(CircuitBreaker.REQUEST), topCount, elementTypes, @@ -137,35 +142,35 @@ private static Page page(String data) { private static Block block(String data) { return switch (data) { case LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i); } yield builder.build(); } case INTS -> { - var builder = IntBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newIntBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendInt(i); } yield builder.build(); } case DOUBLES -> { - var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendDouble(i); } yield builder.build(); } case BOOLEANS -> { - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(BLOCK_LENGTH); + BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendBoolean(i % 2 == 1); } yield builder.build(); } case BYTES_REFS -> { - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(BLOCK_LENGTH); + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendBytesRef(new BytesRef(Integer.toString(i))); } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java index 
11d6d6dc2e64f..3952ae5200b79 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java @@ -22,7 +22,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -78,6 +80,11 @@ public class ValuesSourceReaderBenchmark { private static final int BLOCK_LENGTH = 16 * 1024; private static final int INDEX_SIZE = 10 * BLOCK_LENGTH; private static final int COMMIT_INTERVAL = 500; + private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); static { // Smoke test all the expected values and force loading subclasses more like prod @@ -246,7 +253,7 @@ private static BlockLoader numericBlockLoader(String name, Where where, NumberFi @OperationsPerInvocation(INDEX_SIZE) public void benchmark() { ValuesSourceReaderOperator op = new ValuesSourceReaderOperator( - BlockFactory.getNonBreakingInstance(), + blockFactory, fields(name), List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> { throw new UnsupportedOperationException("can't load _source here"); @@ -379,7 +386,7 @@ private void setupPages() { pages = new ArrayList<>(); switch (layout) { case "in_order" -> { - IntVector.Builder docs = IntVector.newVectorBuilder(BLOCK_LENGTH); + IntVector.Builder docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); for (LeafReaderContext ctx : reader.leaves()) { int begin = 0; while (begin < ctx.reader().maxDoc()) { @@ -390,14 +397,14 @@ private void setupPages() { pages.add( new Page( new DocVector( - IntBlock.newConstantBlockWith(0, end - begin).asVector(), - IntBlock.newConstantBlockWith(ctx.ord, end - begin).asVector(), + blockFactory.newConstantIntBlockWith(0, end - begin).asVector(), + blockFactory.newConstantIntBlockWith(ctx.ord, end - begin).asVector(), docs.build(), true ).asBlock() ) ); - docs = IntVector.newVectorBuilder(BLOCK_LENGTH); + docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); begin = end; } } @@ -408,8 +415,8 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} for (LeafReaderContext ctx : reader.leaves()) { docItrs.add(new ItrAndOrd(IntStream.range(0, ctx.reader().maxDoc()).iterator(), ctx.ord)); } - IntVector.Builder docs = IntVector.newVectorBuilder(BLOCK_LENGTH); - IntVector.Builder leafs = IntVector.newVectorBuilder(BLOCK_LENGTH); + IntVector.Builder docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); + IntVector.Builder leafs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); int size = 0; while (docItrs.isEmpty() == false) { Iterator itrItr = docItrs.iterator(); @@ -425,12 +432,11 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} if (size >= BLOCK_LENGTH) { pages.add( new Page( - new DocVector(IntBlock.newConstantBlockWith(0, size).asVector(), leafs.build(), docs.build(), null) - .asBlock() + new DocVector(blockFactory.newConstantIntVector(0, size), leafs.build(), docs.build(), null).asBlock() ) ); - docs = 
IntVector.newVectorBuilder(BLOCK_LENGTH); - leafs = IntVector.newVectorBuilder(BLOCK_LENGTH); + docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); + leafs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); size = 0; } } @@ -439,7 +445,7 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} pages.add( new Page( new DocVector( - IntBlock.newConstantBlockWith(0, size).asVector(), + blockFactory.newConstantIntBlockWith(0, size).asVector(), leafs.build().asBlock().asVector(), docs.build(), null @@ -465,9 +471,9 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} pages.add( new Page( new DocVector( - IntBlock.newConstantBlockWith(0, 1).asVector(), - IntBlock.newConstantBlockWith(next.ord, 1).asVector(), - IntBlock.newConstantBlockWith(next.itr.nextInt(), 1).asVector(), + blockFactory.newConstantIntVector(0, 1), + blockFactory.newConstantIntVector(next.ord, 1), + blockFactory.newConstantIntVector(next.itr.nextInt(), 1), true ).asBlock() ) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index f691d4bd996a7..aaae18401685a 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -79,6 +79,7 @@ if (BuildParams.inFipsJvm) { // with no x-pack. Tests having security explicitly enabled/disabled will override this setting setting 'xpack.security.enabled', 'false' setting 'xpack.security.fips_mode.enabled', 'true' + setting 'xpack.security.fips_mode.required_providers', '["BCFIPS", "BCJSSE"]' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.authc.password_hashing.algorithm', 'pbkdf2_stretch' keystorePassword 'keystore-password' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java index f8ab8eef1004c..c8ce9d5ca2c71 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java @@ -12,6 +12,7 @@ import org.elasticsearch.gradle.internal.ResolveAllDependencies; import org.gradle.api.Plugin; import org.gradle.api.Project; +import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.plugins.JavaPluginExtension; @@ -26,9 +27,12 @@ public void apply(Project project) { var cacheTestFixturesConfiguration = project.getConfigurations().create(CACHE_TEST_FIXTURES); cacheTestFixturesConfiguration.defaultDependencies(deps -> { DependencyHandler dependencyHandler = project.getDependencies(); - deps.add(dependencyHandler.create("org.reflections:reflections:" + VersionProperties.getVersions().get("reflections"))); - deps.add(dependencyHandler.create("org.javassist:javassist:" + VersionProperties.getVersions().get("javassist"))); + Dependency reflections = dependencyHandler.create( + "org.reflections:reflections:" + VersionProperties.getVersions().get("reflections") + ); + deps.add(reflections); }); + project.getPlugins().withType(JavaPlugin.class, javaPlugin -> { var cacheTestFixtures = project.getTasks().register(CACHE_TEST_FIXTURES, CacheCacheableTestFixtures.class, (t) -> { var testSourceSet = project.getExtensions() diff --git 
a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 614ff159a986c..54bc80e0c08c2 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -48,8 +48,7 @@ ductTape = 1.0.8 commonsCompress = 1.24.0 # packer caching build logic -reflections = 0.9.12 -javassist = 3.28.0-GA +reflections = 0.10.2 # benchmark dependencies jmh = 1.26 diff --git a/docs/changelog/101640.yaml b/docs/changelog/101640.yaml new file mode 100644 index 0000000000000..6f61a3a3ffd84 --- /dev/null +++ b/docs/changelog/101640.yaml @@ -0,0 +1,5 @@ +pr: 101640 +summary: Support cross clusters query in ESQL +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/102584.yaml b/docs/changelog/102584.yaml new file mode 100644 index 0000000000000..44ff5dd9f7461 --- /dev/null +++ b/docs/changelog/102584.yaml @@ -0,0 +1,5 @@ +pr: 102584 +summary: Expose some ML metrics via APM +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/103135.yaml b/docs/changelog/103135.yaml deleted file mode 100644 index 69e5cc88eb7f4..0000000000000 --- a/docs/changelog/103135.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103135 -summary: Use deduced mappings for determining proper fields' format even if `deduce_mappings==false` -area: Transform -type: bug -issues: - - 103115 diff --git a/docs/changelog/103461.yaml b/docs/changelog/103461.yaml new file mode 100644 index 0000000000000..3a1bf30aa90c9 --- /dev/null +++ b/docs/changelog/103461.yaml @@ -0,0 +1,5 @@ +pr: 103461 +summary: Add support for Well Known Binary (WKB) in the fields API for spatial fields +area: Geo +type: enhancement +issues: [] diff --git a/docs/changelog/103591.yaml b/docs/changelog/103591.yaml new file mode 100644 index 0000000000000..41b6e362c5713 --- /dev/null +++ b/docs/changelog/103591.yaml @@ -0,0 +1,6 @@ +pr: 103591 +summary: Wait for the model results on graceful shutdown +area: Machine Learning +type: bug +issues: + - 103414 diff --git a/docs/changelog/103615.yaml b/docs/changelog/103615.yaml new file mode 100644 index 0000000000000..69498c749687f --- /dev/null +++ b/docs/changelog/103615.yaml @@ -0,0 +1,5 @@ +pr: 103615 +summary: Fix downsample api by returning a failure in case one or more downsample persistent tasks failed +area: Downsampling +type: bug +issues: [] diff --git a/docs/changelog/103628.yaml b/docs/changelog/103628.yaml new file mode 100644 index 0000000000000..42259c7bcde46 --- /dev/null +++ b/docs/changelog/103628.yaml @@ -0,0 +1,5 @@ +pr: 103628 +summary: Add ES|QL async delete API +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/103633.yaml b/docs/changelog/103633.yaml new file mode 100644 index 0000000000000..9e36451caafd8 --- /dev/null +++ b/docs/changelog/103633.yaml @@ -0,0 +1,5 @@ +pr: 103633 +summary: Update s3 latency metric to use micros +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/103643.yaml b/docs/changelog/103643.yaml new file mode 100644 index 0000000000000..966fb57acf566 --- /dev/null +++ b/docs/changelog/103643.yaml @@ -0,0 +1,5 @@ +pr: 103643 +summary: "[Profiling] Use shard request cache consistently" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/103646.yaml b/docs/changelog/103646.yaml new file mode 100644 index 0000000000000..b7a6fae025771 --- /dev/null +++ b/docs/changelog/103646.yaml @@ -0,0 +1,5 @@ +pr: 103646 +summary: Add index mapping parameter for `counted_keyword` +area: Aggregations +type: enhancement +issues: [] diff --git 
a/docs/changelog/103669.yaml b/docs/changelog/103669.yaml new file mode 100644 index 0000000000000..57361b9d842e4 --- /dev/null +++ b/docs/changelog/103669.yaml @@ -0,0 +1,5 @@ +pr: 103669 +summary: Validate inference model ids +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/103670.yaml b/docs/changelog/103670.yaml new file mode 100644 index 0000000000000..ad3f0519b5d19 --- /dev/null +++ b/docs/changelog/103670.yaml @@ -0,0 +1,5 @@ +pr: 103670 +summary: "ESQL: Improve local folding of aggregates" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/103710.yaml b/docs/changelog/103710.yaml new file mode 100644 index 0000000000000..539b9f553ccc2 --- /dev/null +++ b/docs/changelog/103710.yaml @@ -0,0 +1,5 @@ +pr: 103710 +summary: List hidden shard stores by default +area: Store +type: enhancement +issues: [] diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 258ea2e7286fa..da9c6e3cf3136 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -64,7 +64,7 @@ complete results in the `wait_for_completion_timeout` parameter. [source,console] ---- -GET /_query/async/get/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=?wait_for_completion_timeout=30s +GET /_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=?wait_for_completion_timeout=30s ---- // TEST[skip: no access to search ID - may return response values] @@ -80,3 +80,13 @@ finished, and the results are returned. } ---- // TEST[skip: no access to search ID - may return response values] + +Use the <<esql-async-query-delete-api,ES|QL async query delete API>> to +delete an async query before the `keep_alive` period ends. If the query +is still running, {es} cancels it.
+ +[source,console] +---- +DELETE /_query/async/delete/FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI= +---- +// TEST[skip: no access to search ID] diff --git a/docs/reference/esql/functions/signature/to_degrees.svg b/docs/reference/esql/functions/signature/to_degrees.svg new file mode 100644 index 0000000000000..01fe0a4770156 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_degrees.svg @@ -0,0 +1 @@ +TO_DEGREES(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/add.asciidoc b/docs/reference/esql/functions/types/add.asciidoc index 7783d08bc3aaa..f4125d5db6db4 100644 --- a/docs/reference/esql/functions/types/add.asciidoc +++ b/docs/reference/esql/functions/types/add.asciidoc @@ -2,11 +2,17 @@ |=== lhs | rhs | result date_period | date_period | date_period -date_period | datetime | datetime datetime | date_period | datetime datetime | time_duration | datetime double | double | double +double | integer | double +double | long | double +integer | double | double integer | integer | integer +integer | long | long +long | double | double +long | integer | long long | long | long time_duration | time_duration | time_duration +unsigned_long | unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/to_degrees.asciidoc new file mode 100644 index 0000000000000..7cb7ca46022c2 --- /dev/null +++ b/docs/reference/esql/functions/types/to_degrees.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +double | double +integer | double +long | double +unsigned_long | double +|=== diff --git a/docs/reference/query-dsl/exists-query.asciidoc b/docs/reference/query-dsl/exists-query.asciidoc index 75d1b07ea3851..9a9f642daa3f4 100644 --- a/docs/reference/query-dsl/exists-query.asciidoc +++ b/docs/reference/query-dsl/exists-query.asciidoc @@ -9,7 +9,7 @@ Returns documents that contain an indexed value for a field. An indexed value may not exist for a document's field due to a variety of reasons: * The field in the source JSON is `null` or `[]` -* The field has `"index" : false` set in the mapping +* The field has `"index" : false` and `"doc_values" : false` set in the mapping * The length of the field value exceeded an `ignore_above` setting in the mapping * The field value was malformed and `ignore_malformed` was defined in the mapping diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index f4875fd096b00..78850f617ee65 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -71,6 +71,11 @@ the sensitive nature of the information. (<<static-cluster-setting,Static>>) Enables fips mode of operation. Set this to `true` if you run this {es} instance in a FIPS 140-2 enabled JVM. For more information, see <<fips-140-compliance>>. Defaults to `false`. +`xpack.security.fips_mode.required_providers`:: +(<<static-cluster-setting,Static>>) +Optionally enforce specific Java JCE/JSSE security providers. For example, set this to `["BCFIPS", "BCJSSE"]` (case-insensitive) to require +the Bouncy Castle FIPS JCE and JSSE security providers. Only applicable when `xpack.security.fips_mode.enabled` is set to `true`.
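The `xpack.security.fips_mode.required_providers` hunk above only documents the setting; the enforcement logic is not part of this section of the diff. For orientation, a check of this shape can be written against the JVM's installed JCE/JSSE providers; the class and method below are invented for illustration, and only standard `java.security` APIs are used:

```java
import java.security.Provider;
import java.security.Security;
import java.util.List;
import java.util.Locale;

public final class RequiredProvidersSketch {
    /** Fails if any required provider name is missing, compared case-insensitively. */
    static void enforce(List<String> requiredProviders) {
        for (String required : requiredProviders) {
            boolean installed = false;
            for (Provider provider : Security.getProviders()) {
                if (provider.getName().toLowerCase(Locale.ROOT).equals(required.toLowerCase(Locale.ROOT))) {
                    installed = true;
                    break;
                }
            }
            if (installed == false) {
                throw new IllegalStateException("required JCE/JSSE provider [" + required + "] is not installed");
            }
        }
    }

    public static void main(String[] args) {
        // In a FIPS JVM this would be List.of("BCFIPS", "BCJSSE") per the docs above.
        enforce(List.of("SUN"));
    }
}
```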
+ [discrete] [[password-hashing-settings]] ==== Password hashing settings diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index c12ae87ee65fe..dc045ba09e531 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -11,9 +11,12 @@ apply plugin: 'elasticsearch.publish' dependencies { api 'net.sf.jopt-simple:jopt-simple:5.0.2' api project(':libs:elasticsearch-core') + + testImplementation(project(":test:framework")) { + exclude group: 'org.elasticsearch', module: 'elasticsearch-cli' + } } -tasks.named("test").configure { enabled = false } // Since CLI does not depend on :server, it cannot run the jarHell task tasks.named("jarHell").configure { enabled = false } diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java index 856dfc6a5a078..69cb76636a996 100644 --- a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java @@ -18,6 +18,8 @@ import java.io.OutputStream; import java.io.PrintWriter; import java.io.Reader; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.nio.charset.Charset; import java.util.Arrays; import java.util.Locale; @@ -274,8 +276,8 @@ public boolean isHeadless() { } private static class ConsoleTerminal extends Terminal { - - private static final Console CONSOLE = System.console(); + private static final int JDK_VERSION_WITH_IS_TERMINAL = 22; + private static final Console CONSOLE = detectTerminal(); ConsoleTerminal() { super(CONSOLE.reader(), CONSOLE.writer(), ERROR_WRITER); @@ -285,6 +287,23 @@ static boolean isSupported() { return CONSOLE != null; } + static Console detectTerminal() { + // JDK >= 22 returns a console even if the terminal is redirected unless using -Djdk.console=java.base + // https://bugs.openjdk.org/browse/JDK-8308591 + Console console = System.console(); + if (console != null && Runtime.version().feature() >= JDK_VERSION_WITH_IS_TERMINAL) { + try { + // verify the console is a terminal using isTerminal() on JDK >= 22 + // TODO: Remove reflection once Java 22 sources are supported, e.g. using a MRJAR + Method isTerminal = Console.class.getMethod("isTerminal"); + return Boolean.TRUE.equals(isTerminal.invoke(console)) ? console : null; + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new AssertionError(e); + } + } + return console; + } + @Override public String readText(String prompt) { return CONSOLE.readLine("%s", prompt); diff --git a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java new file mode 100644 index 0000000000000..9c1faf911a829 --- /dev/null +++ b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cli; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; + +@WithoutSecurityManager +public class TerminalTests extends ESTestCase { + + public void testSystemTerminalIfRedirected() { + // Expect system terminal if redirected for tests. 
+ // To force new behavior in JDK 22 this should run without security manager. + // Otherwise, JDK 22 doesn't provide a console if redirected. + assertEquals(Terminal.SystemTerminal.class, Terminal.DEFAULT.getClass()); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 49fdc44681aa3..f021eb61ca753 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -30,11 +30,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.tracing.SpanId; +import org.elasticsearch.telemetry.tracing.TraceContext; +import org.elasticsearch.telemetry.tracing.Traceable; import java.security.AccessController; import java.security.PrivilegedAction; @@ -61,7 +61,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private static final Logger logger = LogManager.getLogger(APMTracer.class); /** Holds in-flight span information. */ - private final Map<SpanId, Context> spans = ConcurrentCollections.newConcurrentMap(); + private final Map<String, Context> spans = ConcurrentCollections.newConcurrentMap(); private volatile boolean enabled; private volatile APMServices services; @@ -160,8 +160,9 @@ private void destroyApmServices() { } @Override - public void startTrace(ThreadContext threadContext, SpanId spanId, String spanName, @Nullable Map<String, Object> attributes) { - assert threadContext != null; + public void startTrace(TraceContext traceContext, Traceable traceable, String spanName, @Nullable Map<String, Object> attributes) { + assert traceContext != null; + String spanId = traceable.getSpanId(); assert spanId != null; assert spanName != null; @@ -182,21 +183,21 @@ public void startTrace(ThreadContext threadContext, SpanId spanId, String spanNa // A span can have a parent span, which here is modelled through a parent span context. // Setting this is important for seeing a complete trace in the APM UI.
- final Context parentContext = getParentContext(threadContext); + final Context parentContext = getParentContext(traceContext); if (parentContext != null) { spanBuilder.setParent(parentContext); } - setSpanAttributes(threadContext, attributes, spanBuilder); + setSpanAttributes(traceContext, attributes, spanBuilder); - Instant startTime = threadContext.getTransient(Task.TRACE_START_TIME); + Instant startTime = traceContext.getTransient(Task.TRACE_START_TIME); if (startTime != null) { spanBuilder.setStartTimestamp(startTime); } final Span span = spanBuilder.startSpan(); final Context contextForNewSpan = Context.current().with(span); - updateThreadContext(threadContext, services, contextForNewSpan); + updateThreadContext(traceContext, services, contextForNewSpan); return contextForNewSpan; })); @@ -221,29 +222,29 @@ public void startTrace(String name, Map attributes) { spanBuilder.startSpan(); } - private static void updateThreadContext(ThreadContext threadContext, APMServices services, Context context) { + private static void updateThreadContext(TraceContext traceContext, APMServices services, Context context) { // The new span context can be used as the parent context directly within the same Java process... - threadContext.putTransient(Task.APM_TRACE_CONTEXT, context); + traceContext.putTransient(Task.APM_TRACE_CONTEXT, context); - // ...whereas for tasks sent to other ES nodes, we need to put trace HTTP headers into the threadContext so + // ...whereas for tasks sent to other ES nodes, we need to put trace HTTP headers into the traceContext so // that they can be propagated. - services.openTelemetry.getPropagators().getTextMapPropagator().inject(context, threadContext, (tc, key, value) -> { + services.openTelemetry.getPropagators().getTextMapPropagator().inject(context, traceContext, (tc, key, value) -> { if (isSupportedContextKey(key)) { tc.putHeader(key, value); } }); } - private Context getParentContext(ThreadContext threadContext) { + private Context getParentContext(TraceContext traceContext) { // https://github.com/open-telemetry/opentelemetry-java/discussions/2884#discussioncomment-381870 // If you just want to propagate across threads within the same process, you don't need context propagators (extract/inject). // You can just pass the Context object directly to another thread (it is immutable and thus thread-safe). // Attempt to fetch a local parent context first, otherwise look for a remote parent - Context parentContext = threadContext.getTransient("parent_" + Task.APM_TRACE_CONTEXT); + Context parentContext = traceContext.getTransient("parent_" + Task.APM_TRACE_CONTEXT); if (parentContext == null) { - final String traceParentHeader = threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER); - final String traceStateHeader = threadContext.getTransient("parent_" + Task.TRACE_STATE); + final String traceParentHeader = traceContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER); + final String traceStateHeader = traceContext.getTransient("parent_" + Task.TRACE_STATE); if (traceParentHeader != null) { final Map traceContextMap = Maps.newMapWithExpectedSize(2); @@ -276,12 +277,12 @@ private Context getParentContext(ThreadContext threadContext) { * However, if a scope is active, then the APM agent can capture additional information, so this method * exists to make it possible to use scopes in the few situation where it makes sense. * - * @param spanId the ID of a currently-open span for which to open a scope. 
+ * @param traceable provides the ID of a currently-open span for which to open a scope. * @return a method to close the scope when you are finished with it. */ @Override - public Releasable withScope(SpanId spanId) { - final Context context = spans.get(spanId); + public Releasable withScope(Traceable traceable) { + final Context context = spans.get(traceable.getSpanId()); if (context != null) { var scope = AccessController.doPrivileged((PrivilegedAction) context::makeCurrent); return scope::close; @@ -327,60 +328,60 @@ private void setSpanAttributes(@Nullable Map spanAttributes, Spa spanBuilder.setAttribute(org.elasticsearch.telemetry.tracing.Tracer.AttributeKeys.CLUSTER_NAME, clusterName); } - private void setSpanAttributes(ThreadContext threadContext, @Nullable Map spanAttributes, SpanBuilder spanBuilder) { + private void setSpanAttributes(TraceContext traceContext, @Nullable Map spanAttributes, SpanBuilder spanBuilder) { setSpanAttributes(spanAttributes, spanBuilder); - final String xOpaqueId = threadContext.getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); + final String xOpaqueId = traceContext.getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); if (xOpaqueId != null) { spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); } } @Override - public void addError(SpanId spanId, Throwable throwable) { - final var span = Span.fromContextOrNull(spans.get(spanId)); + public void addError(Traceable traceable, Throwable throwable) { + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); if (span != null) { span.recordException(throwable); } } @Override - public void setAttribute(SpanId spanId, String key, boolean value) { - final var span = Span.fromContextOrNull(spans.get(spanId)); + public void setAttribute(Traceable traceable, String key, boolean value) { + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); if (span != null) { span.setAttribute(key, value); } } @Override - public void setAttribute(SpanId spanId, String key, double value) { - final var span = Span.fromContextOrNull(spans.get(spanId)); + public void setAttribute(Traceable traceable, String key, double value) { + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); if (span != null) { span.setAttribute(key, value); } } @Override - public void setAttribute(SpanId spanId, String key, long value) { - final var span = Span.fromContextOrNull(spans.get(spanId)); + public void setAttribute(Traceable traceable, String key, long value) { + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); if (span != null) { span.setAttribute(key, value); } } @Override - public void setAttribute(SpanId spanId, String key, String value) { - final var span = Span.fromContextOrNull(spans.get(spanId)); + public void setAttribute(Traceable traceable, String key, String value) { + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); if (span != null) { span.setAttribute(key, value); } } @Override - public void stopTrace(SpanId spanId) { - final var span = Span.fromContextOrNull(spans.remove(spanId)); + public void stopTrace(Traceable traceable) { + final var span = Span.fromContextOrNull(spans.remove(traceable.getSpanId())); if (span != null) { - logger.trace("Finishing trace [{}]", spanId); + logger.trace("Finishing trace [{}]", traceable); AccessController.doPrivileged((PrivilegedAction) () -> { span.end(); return null; @@ -400,8 +401,8 @@ public void stopTrace() { } @Override - public void addEvent(SpanId spanId, String eventName) { - final var span = 
Span.fromContextOrNull(spans.get(spanId)); + public void addEvent(Traceable traceable, String eventName) { + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); if (span != null) { span.addEvent(eventName); } @@ -425,7 +426,7 @@ private static boolean isSupportedContextKey(String key) { } // VisibleForTesting - Map<SpanId, Context> getSpans() { + Map<String, Context> getSpans() { return spans; } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java index 8cb94b782756d..04a4e1b3f3a34 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java @@ -22,13 +22,14 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tasks.Task; import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; -import org.elasticsearch.telemetry.tracing.SpanId; +import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.test.ESTestCase; import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; @@ -44,9 +45,9 @@ public class APMTracerTests extends ESTestCase { - private static final SpanId SPAN_ID1 = SpanId.forBareString("id1"); - private static final SpanId SPAN_ID2 = SpanId.forBareString("id2"); - private static final SpanId SPAN_ID3 = SpanId.forBareString("id3"); + private static final Traceable TRACEABLE1 = new TestTraceable("id1"); + private static final Traceable TRACEABLE2 = new TestTraceable("id2"); + private static final Traceable TRACEABLE3 = new TestTraceable("id3"); /** * Check that the tracer doesn't create spans when tracing is disabled.
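A note on the refactor in this file: replacing SpanId parameters with Traceable inverts the dependency, so callers no longer build tracer-specific ids; the tracer derives its map key from Traceable.getSpanId(), which is why the spans map is now keyed by String. Any object with a stable id can participate; a hypothetical implementer, not taken from this PR:

```java
import org.elasticsearch.telemetry.tracing.Traceable;

// Hypothetical domain object: implementing Traceable only requires exposing
// a stable, unique span id string for the lifetime of the span.
record SnapshotTaskTraceable(long taskId) implements Traceable {
    @Override
    public String getSpanId() {
        return "snapshot-task-" + taskId;
    }
}
```

Passing the same instance to startTrace and stopTrace, as the tests below do with TestTraceable, is what keeps the tracer's spans map from leaking entries.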
@@ -55,7 +56,7 @@ public void test_onTraceStarted_withTracingDisabled_doesNotStartTrace() { Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), false).build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), anEmptyMap()); } @@ -70,7 +71,7 @@ public void test_onTraceStarted_withSpanNameOmitted_doesNotStartTrace() { .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), anEmptyMap()); } @@ -82,10 +83,10 @@ public void test_onTraceStarted_startsTrace() { Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), aMapWithSize(1)); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID1)); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE1.getSpanId())); } /** @@ -99,10 +100,10 @@ public void test_onTraceStartedWithStartTime_startsTrace() { // 1_000_000L because of "toNanos" conversions that overflow for large long millis Instant spanStartTime = Instant.ofEpochMilli(randomLongBetween(0, Long.MAX_VALUE / 1_000_000L)); threadContext.putTransient(Task.TRACE_START_TIME, spanStartTime); - apmTracer.startTrace(threadContext, SPAN_ID1, "name1", null); + apmTracer.startTrace(threadContext, TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), aMapWithSize(1)); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID1)); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE1.getSpanId())); assertThat(((SpyAPMTracer) apmTracer).getSpanStartTime("name1"), is(spanStartTime)); } @@ -113,8 +114,8 @@ public void test_onTraceStopped_stopsTrace() { Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); - apmTracer.stopTrace(SPAN_ID1); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); + apmTracer.stopTrace(TRACEABLE1); assertThat(apmTracer.getSpans(), anEmptyMap()); } @@ -131,7 +132,7 @@ public void test_whenTraceStarted_threadContextIsPopulated() { APMTracer apmTracer = buildTracer(settings); ThreadContext threadContext = new ThreadContext(settings); - apmTracer.startTrace(threadContext, SPAN_ID1, "name1", null); + apmTracer.startTrace(threadContext, TRACEABLE1, "name1", null); assertThat(threadContext.getTransient(Task.APM_TRACE_CONTEXT), notNullValue()); } @@ -152,13 +153,13 @@ public void test_whenTraceStarted_andSpanNameIncluded_thenSpanIsStarted() { .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name-aaa", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID2, "name-bbb", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID3, "name-ccc", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name-aaa", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE2, "name-bbb", null); + 
apmTracer.startTrace(new ThreadContext(settings), TRACEABLE3, "name-ccc", null); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID1)); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID2)); - assertThat(apmTracer.getSpans(), not(hasKey(SPAN_ID3))); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE1.getSpanId())); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE2.getSpanId())); + assertThat(apmTracer.getSpans(), not(hasKey(TRACEABLE3.getSpanId()))); } /** @@ -175,7 +176,7 @@ public void test_whenTraceStarted_andSpanNameIncludedAndExcluded_thenSpanIsNotSt .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name-aaa", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name-aaa", null); assertThat(apmTracer.getSpans(), not(hasKey("id1"))); } @@ -197,13 +198,13 @@ public void test_whenTraceStarted_andSpanNameExcluded_thenSpanIsNotStarted() { .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name-aaa", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID2, "name-bbb", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID3, "name-ccc", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name-aaa", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE2, "name-bbb", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE3, "name-ccc", null); - assertThat(apmTracer.getSpans(), not(hasKey(SPAN_ID1))); - assertThat(apmTracer.getSpans(), not(hasKey(SPAN_ID2))); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID3)); + assertThat(apmTracer.getSpans(), not(hasKey(TRACEABLE1.getSpanId()))); + assertThat(apmTracer.getSpans(), not(hasKey(TRACEABLE2.getSpanId()))); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE3.getSpanId())); } /** @@ -360,4 +361,17 @@ public Span startSpan() { } } } + + private static class TestTraceable implements Traceable { + private final String spanId; + + TestTraceable(String spanId) { + this.spanId = Objects.requireNonNull(spanId); + } + + @Override + public String getSpanId() { + return spanId; + } + } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index a26352eb3d8c7..9bdabcede8ec6 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -18,8 +18,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -99,6 +102,20 @@ public String toString() { private final Item[] items; private final long tookInMillis; + private final RefCounted refCounted = LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + for (int i = 0; i < items.length; i++) { + Item item = items[i]; + var r = item.response; + 
if (r != null) { + r.decRef(); + items[i] = null; + } + } + } + }); + MultiSearchTemplateResponse(StreamInput in) throws IOException { super(in); items = in.readArray(Item::new, Item[]::new); @@ -162,6 +179,26 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par return builder; } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + static final class Fields { static final String RESPONSES = "responses"; static final String STATUS = "status"; @@ -179,6 +216,7 @@ public static MultiSearchTemplateResponse fromXContext(XContentParser parser) { if (item.getResponse() != null) { stResponse = new SearchTemplateResponse(); stResponse.setResponse(item.getResponse()); + item.getResponse().incRef(); } templateResponses[i++] = new Item(stResponse, item.getFailure()); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index a602ad23cb178..9451ac089476e 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -14,7 +14,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.core.AbstractRefCounted; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -35,6 +38,15 @@ public class SearchTemplateResponse extends ActionResponse implements ToXContent /** Contains the search response, if any **/ private SearchResponse response; + private final RefCounted refCounted = LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + if (response != null) { + response.decRef(); + } + } + }); + SearchTemplateResponse() {} SearchTemplateResponse(StreamInput in) throws IOException { @@ -74,6 +86,26 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(response); } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + public static SearchTemplateResponse fromXContent(XContentParser parser) throws IOException { SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); Map<String, Object> contentAsMap = parser.map(); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 4b0c365ba8b13..11871978e433a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -80,6 +80,7 @@ protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionLi try { searchRequest = convert(searchTemplateRequest, searchTemplateResponse, scriptService, xContentRegistry, searchUsageHolder); } catch (Exception e) { + searchTemplateResponse.decRef(); items[i] = new MultiSearchTemplateResponse.Item(null, e); if (ExceptionsHelper.status(e).getStatus() >= 500 && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false) { logger.warn("MultiSearchTemplate convert failure", e); @@ -98,12 +99,17 @@ protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionLi MultiSearchResponse.Item item = r.getResponses()[i]; int originalSlot = originalSlots.get(i); if (item.isFailure()) { + var existing = items[originalSlot]; + if (existing.getResponse() != null) { + existing.getResponse().decRef(); + } items[originalSlot] = new MultiSearchTemplateResponse.Item(null, item.getFailure()); } else { items[originalSlot].getResponse().setResponse(item.getResponse()); + item.getResponse().incRef(); } } - l.onResponse(new MultiSearchTemplateResponse(items, r.getTook().millis())); + ActionListener.respondAndRelease(l, new MultiSearchTemplateResponse(items, r.getTook().millis())); })); } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 2b315f48dcce4..c6bd2afc64d21 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -71,18 +71,29 @@ public TransportSearchTemplateAction( @Override protected void doExecute(Task task, SearchTemplateRequest request, ActionListener<SearchTemplateResponse> listener) { final SearchTemplateResponse response = new SearchTemplateResponse(); + boolean success = false; try { SearchRequest searchRequest = convert(request, response, scriptService, xContentRegistry, searchUsageHolder); if (searchRequest != null) { - client.search(searchRequest, listener.delegateFailureAndWrap((l, searchResponse) -> { + client.search(searchRequest, listener.delegateResponse((l, e) -> { + response.decRef(); + l.onFailure(e); + }).delegateFailureAndWrap((l, searchResponse) -> { response.setResponse(searchResponse); - l.onResponse(response); + searchResponse.incRef(); + ActionListener.respondAndRelease(l, response); })); + success = true; } else { - listener.onResponse(response); + success = true; + ActionListener.respondAndRelease(listener, response); } } catch (IOException e) { listener.onFailure(e); + } finally { + if (success == false) { + response.decRef(); + } } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java index a66f11802c8db..03f2fbd3e81a7 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import
org.elasticsearch.common.Strings; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.xcontent.ToXContent; @@ -146,7 +147,13 @@ public void testFromXContentWithFailures() throws IOException { this::doParseInstance, this::assertEqualInstances, assertToXContentEquivalence, - ToXContent.EMPTY_PARAMS + ToXContent.EMPTY_PARAMS, + RefCounted::decRef ); } + + @Override + protected void dispose(MultiSearchTemplateResponse instance) { + instance.decRef(); + } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 3a11f2d53a5b9..73c8887669a02 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -125,33 +125,36 @@ protected boolean supportsUnknownFields() { public void testSourceToXContent() throws IOException { SearchTemplateResponse response = new SearchTemplateResponse(); + try { + XContentBuilder source = XContentFactory.jsonBuilder() + .startObject() + .startObject("query") + .startObject("terms") + .field("status", new String[] { "pending", "published" }) + .endObject() + .endObject() + .endObject(); + response.setSource(BytesReference.bytes(source)); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + .startObject() + .startObject("template_output") + .startObject("query") + .startObject("terms") + .field("status", new String[] { "pending", "published" }) + .endObject() + .endObject() + .endObject() + .endObject(); - XContentBuilder source = XContentFactory.jsonBuilder() - .startObject() - .startObject("query") - .startObject("terms") - .field("status", new String[] { "pending", "published" }) - .endObject() - .endObject() - .endObject(); - response.setSource(BytesReference.bytes(source)); - - XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) - .startObject() - .startObject("template_output") - .startObject("query") - .startObject("terms") - .field("status", new String[] { "pending", "published" }) - .endObject() - .endObject() - .endObject() - .endObject(); - - XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); - response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); - - assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + } finally { + response.decRef(); + } } public void testSearchResponseToXContent() throws IOException { @@ -177,37 +180,46 @@ public void testSearchResponseToXContent() throws IOException { ); SearchTemplateResponse response = new SearchTemplateResponse(); - response.setResponse(searchResponse); - - XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) - .startObject() - 
.field("took", 0) - .field("timed_out", false) - .startObject("_shards") - .field("total", 0) - .field("successful", 0) - .field("skipped", 0) - .field("failed", 0) - .endObject() - .startObject("hits") - .startObject("total") - .field("value", 100) - .field("relation", "eq") - .endObject() - .field("max_score", 1.5F) - .startArray("hits") - .startObject() - .field("_id", "id") - .field("_score", 2.0F) - .endObject() - .endArray() - .endObject() - .endObject(); - - XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); - response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); - - assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + try { + response.setResponse(searchResponse); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + .startObject() + .field("took", 0) + .field("timed_out", false) + .startObject("_shards") + .field("total", 0) + .field("successful", 0) + .field("skipped", 0) + .field("failed", 0) + .endObject() + .startObject("hits") + .startObject("total") + .field("value", 100) + .field("relation", "eq") + .endObject() + .field("max_score", 1.5F) + .startArray("hits") + .startObject() + .field("_id", "id") + .field("_score", 2.0F) + .endObject() + .endArray() + .endObject() + .endObject(); + + XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + } finally { + response.decRef(); + } + } + + @Override + protected void dispose(SearchTemplateResponse instance) { + instance.decRef(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index 310f9394f60c1..dbc124d40a591 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -108,8 +108,13 @@ public void testIndices() throws Exception { String index1 = "test1"; String index2 = "test2"; internalCluster().ensureAtLeastNumDataNodes(2); - assertAcked(prepareCreate(index1).setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "2"))); - assertAcked(prepareCreate(index2).setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "2"))); + for (final var index : List.of(index1, index2)) { + final var settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2); + if (randomBoolean()) { + settings.put(IndexMetadata.SETTING_INDEX_HIDDEN, randomBoolean()); + } + assertAcked(prepareCreate(index).setSettings(settings)); + } indexRandomData(index1); indexRandomData(index2); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index 21bbd32e6bf26..10fe7982948ba 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -83,7 +83,7 
@@ public void testBasic() { } refresh("test"); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); - assertResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp1 -> { + assertResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp1 -> { assertThat(resp1.pointInTimeId(), equalTo(pitId)); assertHitCount(resp1, numDocs); }); @@ -99,13 +99,13 @@ public void testBasic() { if (randomBoolean()) { final int delDocCount = deletedDocs; assertNoFailuresAndResponse( - prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()), + prepareSearch("test").setQuery(new MatchAllQueryBuilder()), resp2 -> assertHitCount(resp2, numDocs - delDocCount) ); } try { assertNoFailuresAndResponse( - prepareSearch().setPreference(null).setQuery(new MatchAllQueryBuilder()).setPointInTime(new PointInTimeBuilder(pitId)), + prepareSearch().setQuery(new MatchAllQueryBuilder()).setPointInTime(new PointInTimeBuilder(pitId)), resp3 -> { assertHitCount(resp3, numDocs); assertThat(resp3.pointInTimeId(), equalTo(pitId)); @@ -131,7 +131,7 @@ public void testMultipleIndices() { String pitId = openPointInTime(new String[] { "*" }, TimeValue.timeValueMinutes(2)); try { int moreDocs = randomIntBetween(10, 50); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -143,7 +143,7 @@ public void testMultipleIndices() { refresh(); }); assertNoFailuresAndResponse(prepareSearch(), resp -> assertHitCount(resp, numDocs + moreDocs)); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -212,7 +212,7 @@ public void testRelocation() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); try { - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); @@ -232,7 +232,7 @@ public void testRelocation() throws Exception { } refresh(); } - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); @@ -245,7 +245,7 @@ public void testRelocation() throws Exception { .collect(Collectors.toSet()); assertThat(assignedNodes, everyItem(not(in(excludedNodes)))); }, 30, TimeUnit.SECONDS); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); @@ -263,7 +263,7 @@ 
public void testPointInTimeNotFound() throws Exception { } refresh(); String pit = openPointInTime(new String[] { "index" }, TimeValue.timeValueSeconds(5)); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), resp1 -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pit)), resp1 -> { assertHitCount(resp1, index1); if (rarely()) { try { @@ -280,7 +280,7 @@ public void testPointInTimeNotFound() throws Exception { }); SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get() + () -> prepareSearch().setPointInTime(new PointInTimeBuilder(pit)).get() ); for (ShardSearchFailure failure : e.shardFailures()) { assertThat(ExceptionsHelper.unwrapCause(failure.getCause()), instanceOf(SearchContextMissingException.class)); @@ -306,7 +306,7 @@ public void testIndexNotFound() { String pit = openPointInTime(new String[] { "index-*" }, TimeValue.timeValueMinutes(2)); try { assertNoFailuresAndResponse( - prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), + prepareSearch().setPointInTime(new PointInTimeBuilder(pit)), resp -> assertHitCount(resp, index1 + index2) ); indicesAdmin().prepareDelete("index-1").get(); @@ -315,21 +315,15 @@ public void testIndexNotFound() { } // Allow partial search result - assertResponse( - prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)), - resp -> { - assertFailures(resp); - assertHitCount(resp, index2); - } - ); + assertResponse(prepareSearch().setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)), resp -> { + assertFailures(resp); + assertHitCount(resp, index2); + }); // Do not allow partial search result expectThrows( ElasticsearchException.class, - () -> prepareSearch().setPreference(null) - .setAllowPartialSearchResults(false) - .setPointInTime(new PointInTimeBuilder(pit)) - .get() + () -> prepareSearch().setAllowPartialSearchResults(false).setPointInTime(new PointInTimeBuilder(pit)).get() ); } finally { closePointInTime(pit); @@ -365,7 +359,6 @@ public void testCanMatch() throws Exception { assertResponse( prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) .setPreFilterShardSize(randomIntBetween(2, 3)) .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) .setPointInTime(new PointInTimeBuilder(pitId)), @@ -422,20 +415,17 @@ public void testPartialResults() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test-*" }, TimeValue.timeValueMinutes(2)); try { - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs1 + numDocs2); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); internalCluster().restartNode(assignedNodeForIndex1); - assertResponse( - prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), - resp -> { - assertFailures(resp); - assertThat(resp.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp, numDocs2); - } - ); + assertResponse(prepareSearch().setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), 
resp -> { + assertFailures(resp); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp, numDocs2); + }); } finally { closePointInTime(pitId); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java index 214e3f73144d9..7a8accf8cc7ce 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java @@ -46,8 +46,8 @@ public void testSimpleLocalHealth() { .prepareHealth() .setLocal(true) .setWaitForEvents(Priority.LANGUID) - .setTimeout("30s") - .get("10s"); + .setTimeout(TimeValue.timeValueSeconds(30)) + .get(TimeValue.timeValueSeconds(10)); logger.info("--> got cluster health on [{}]", node); assertFalse("timed out on " + node, health.isTimedOut()); assertThat("health status on " + node, health.getStatus(), equalTo(ClusterHealthStatus.GREEN)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java index 4aabf0ac66a32..a0efb81c18668 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java @@ -109,7 +109,7 @@ static ConflictMode randomMode() { public void testAckedIndexing() throws Exception { final int seconds = (TEST_NIGHTLY && rarely()) == false ? 1 : 5; - final String timeout = seconds + "s"; + final TimeValue timeout = TimeValue.timeValueSeconds(seconds); final List<String> nodes = startCluster(rarely() ? 5 : 3); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index d76031d402af0..fb54de209441a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -473,7 +473,7 @@ public void testScrollAndSearchAfterWithBigIndex() { { OpenPointInTimeRequest openPITRequest = new OpenPointInTimeRequest("test").keepAlive(TimeValue.timeValueMinutes(5)); pitID = client().execute(TransportOpenPointInTimeAction.TYPE, openPITRequest).actionGet().getPointInTimeId(); - SearchRequest searchRequest = new SearchRequest("test").source( + SearchRequest searchRequest = new SearchRequest().source( new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(pitID).setKeepAlive(TimeValue.timeValueMinutes(5))) .sort("timestamp") ); @@ -509,7 +509,7 @@ public void testScrollAndSearchAfterWithBigIndex() { { OpenPointInTimeRequest openPITRequest = new OpenPointInTimeRequest("test").keepAlive(TimeValue.timeValueMinutes(5)); pitID = client().execute(TransportOpenPointInTimeAction.TYPE, openPITRequest).actionGet().getPointInTimeId(); - SearchRequest searchRequest = new SearchRequest("test").source( + SearchRequest searchRequest = new SearchRequest().source( new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(pitID).setKeepAlive(TimeValue.timeValueMinutes(5))) .sort(SortBuilders.pitTiebreaker()) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java
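
The SearchSliceIT hunk below makes the same adjustment as the PointInTimeIT and SearchAfterIT changes above: once a point-in-time is attached, the request no longer names indices, because the stricter SearchRequest.validate() later in this diff rejects indices, indicesOptions, routing, and preference alongside a PIT. A sketch of the resulting usage, assuming a pitId previously returned by an open-point-in-time call (the PitSearchSketch class is illustrative):

    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.core.TimeValue;
    import org.elasticsearch.search.builder.PointInTimeBuilder;
    import org.elasticsearch.search.builder.SearchSourceBuilder;

    class PitSearchSketch {
        // The PIT already pins concrete shard contexts, so the request stays
        // index-free; naming indices here would now fail validation.
        static SearchRequest pitSearch(String pitId) {
            return new SearchRequest().source(
                new SearchSourceBuilder().pointInTimeBuilder(
                    new PointInTimeBuilder(pitId).setKeepAlive(TimeValue.timeValueMinutes(2))
                )
            );
        }
    }
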
index 93340bedbdae3..c6b913185756a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -222,7 +222,7 @@ private void assertSearchSlicesWithPointInTime(String sliceField, String sortFie for (int id = 0; id < numSlice; id++) { int numSliceResults = 0; - SearchRequestBuilder request = prepareSearch("test").slice(new SliceBuilder(sliceField, id, numSlice)) + SearchRequestBuilder request = prepareSearch().slice(new SliceBuilder(sliceField, id, numSlice)) .setPointInTime(new PointInTimeBuilder(pointInTimeId)) .addSort(SortBuilders.fieldSort(sortField)) .setSize(randomIntBetween(10, 100)); diff --git a/server/src/main/java/org/elasticsearch/Build.java b/server/src/main/java/org/elasticsearch/Build.java index 215f0bdce57ac..0b8cd149744e3 100644 --- a/server/src/main/java/org/elasticsearch/Build.java +++ b/server/src/main/java/org/elasticsearch/Build.java @@ -47,8 +47,9 @@ private static class CurrentHolder { // finds the pluggable current build, or uses the local build as a fallback private static Build findCurrent() { - var buildExtension = ExtensionLoader.loadSingleton(ServiceLoader.load(BuildExtension.class), () -> Build::findLocalBuild); - return buildExtension.getCurrentBuild(); + return ExtensionLoader.loadSingleton(ServiceLoader.load(BuildExtension.class)) + .map(BuildExtension::getCurrentBuild) + .orElseGet(Build::findLocalBuild); } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 92bb88f16385d..d3224bb048393 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -109,13 +109,11 @@ public String toString() { private static class CurrentHolder { private static final TransportVersion CURRENT = findCurrent(); - // finds the pluggable current version, or uses the given fallback + // finds the pluggable current version private static TransportVersion findCurrent() { - var versionExtension = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class), () -> null); - if (versionExtension == null) { - return TransportVersions.LATEST_DEFINED; - } - var version = versionExtension.getCurrentTransportVersion(TransportVersions.LATEST_DEFINED); + var version = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class)) + .map(e -> e.getCurrentTransportVersion(TransportVersions.LATEST_DEFINED)) + .orElse(TransportVersions.LATEST_DEFINED); assert version.onOrAfter(TransportVersions.LATEST_DEFINED); return version; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index bc95caa2d0bf0..3362ea026d64f 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -173,6 +173,8 @@ static TransportVersion def(int id) { public static final TransportVersion NODE_STATS_REQUEST_SIMPLIFIED = def(8_561_00_0); public static final TransportVersion TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED = def(8_562_00_0); public static final TransportVersion ESQL_ASYNC_QUERY = def(8_563_00_0); + public static final TransportVersion ESQL_STATUS_INCLUDE_LUCENE_QUERIES = def(8_564_00_0); + public static final TransportVersion ESQL_CLUSTER_ALIAS = def(8_565_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/ActionFuture.java b/server/src/main/java/org/elasticsearch/action/ActionFuture.java index e51e31f4c03ce..061875e42fec8 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/ActionFuture.java @@ -27,22 +27,6 @@ public interface ActionFuture<T> extends Future<T> { */ T actionGet(); - /** - * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing - * an {@link IllegalStateException} instead. Also catches - * {@link java.util.concurrent.ExecutionException} and throws the actual cause instead. - */ - T actionGet(String timeout); - - /** - * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing - * an {@link IllegalStateException} instead. Also catches - * {@link java.util.concurrent.ExecutionException} and throws the actual cause instead. - * - * @param timeoutMillis Timeout in millis - */ - T actionGet(long timeoutMillis); - /** * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing * an {@link IllegalStateException} instead. Also catches diff --git a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index 6209e9fce390e..32d65d743e6a6 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -48,13 +48,6 @@ public Response get(TimeValue timeout) { return execute().actionGet(timeout); } - /** - * Short version of execute().actionGet().
- */ - public Response get(String timeout) { - return execute().actionGet(timeout); - } - public void execute(ActionListener<Response> listener) { client.execute(action, request, listener); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java index a4a8a475ae8b7..3e63f6f253860 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java @@ -32,7 +32,7 @@ public class IndicesShardStoresRequest extends MasterNodeReadRequest<IndicesShardStoresRequest> { private EnumSet<ClusterHealthStatus> statuses = EnumSet.of(ClusterHealthStatus.YELLOW, ClusterHealthStatus.RED); private int maxConcurrentShardRequests = DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java index f10650a6401d6..83a6870d72491 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java @@ -32,10 +32,10 @@ import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Collectors; public final class SearchContextId { @@ -110,12 +110,30 @@ public static SearchContextId decode(NamedWriteableRegistry namedWriteableRegist } } + public static String[] decodeIndices(String id) { + try ( + var decodedInputStream = Base64.getUrlDecoder().wrap(new ByteArrayInputStream(id.getBytes(StandardCharsets.ISO_8859_1))); + var in = new InputStreamStreamInput(decodedInputStream) + ) { + final TransportVersion version = TransportVersion.readVersion(in); + in.setTransportVersion(version); + final Map<ShardId, SearchContextIdForNode> shards = Collections.unmodifiableMap( + in.readCollection(Maps::newHashMapWithExpectedSize, SearchContextId::readShardsMapEntry) + ); + return new SearchContextId(shards, Collections.emptyMap()).getActualIndices(); + } catch (IOException e) { + assert false : e; + throw new IllegalArgumentException(e); + } + } + private static void readShardsMapEntry(StreamInput in, Map<ShardId, SearchContextIdForNode> shards) throws IOException { shards.put(new ShardId(in), new SearchContextIdForNode(in)); } public String[] getActualIndices() { - final Set<String> indices = new HashSet<>(); + // ensure that the order is consistent + final Set<String> indices = new TreeSet<>(); for (Map.Entry<ShardId, SearchContextIdForNode> entry : shards().entrySet()) { final String indexName = entry.getKey().getIndexName(); final String clusterAlias = entry.getValue().getClusterAlias(); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 7ac8c4d5299d4..456a574c6f6b2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -407,6 +407,21 @@ public ActionRequestValidationException validate() { if (scroll) { validationException = addValidationError("using [point in time] is not allowed in a scroll context", validationException); } + if (indices().length > 0) { + validationException = addValidationError( + "[indices] cannot be used with point in time.
Do not specify any index with point in time.", + validationException + ); + } + if (indicesOptions().equals(DEFAULT_INDICES_OPTIONS) == false) { + validationException = addValidationError("[indicesOptions] cannot be used with point in time", validationException); + } + if (routing() != null) { + validationException = addValidationError("[routing] cannot be used with point in time", validationException); + } + if (preference() != null) { + validationException = addValidationError("[preference] cannot be used with point in time", validationException); + } } else if (source != null && source.sorts() != null) { for (SortBuilder<?> sortBuilder : source.sorts()) { if (sortBuilder instanceof FieldSortBuilder diff --git a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java index 721983b6af0e7..e2b8fcbf2825c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java @@ -31,13 +31,15 @@ public class PlainActionFuture<T> implements ActionFuture<T>, ActionListener<T> { @Override - public void onResponse(T result) { + public void onResponse(@Nullable T result) { set(result); } @Override public void onFailure(Exception e) { - setException(e); + if (sync.setException(Objects.requireNonNull(e))) { + done(false); + } } private static final String BLOCKING_OP_REASON = "Blocking operation"; @@ -115,23 +117,9 @@ public boolean cancel(boolean mayInterruptIfRunning) { return false; } done(false); - if (mayInterruptIfRunning) { - interruptTask(); - } return true; } - /** - * Subclasses can override this method to implement interruption of the - * future's computation. The method is invoked automatically by a successful - * call to {@link #cancel(boolean) cancel(true)}. - * <p>

- * The default implementation does nothing. - * - * @since 10.0 - */ - protected void interruptTask() {} - /** * Subclasses should invoke this method to set the result of the computation * to {@code value}. This will set the state of the future to @@ -141,7 +129,7 @@ protected void interruptTask() {} * @param value the value that was the result of the task. * @return true if the state was successfully changed. */ - protected boolean set(@Nullable T value) { + protected final boolean set(@Nullable T value) { boolean result = sync.set(value); if (result) { done(true); @@ -149,33 +137,6 @@ protected boolean set(@Nullable T value) { return result; } - /** - * Subclasses should invoke this method to set the result of the computation - * to an error, {@code throwable}. This will set the state of the future to - * {@link PlainActionFuture.Sync#COMPLETED} and call {@link #done(boolean)} if the - * state was successfully changed. - * - * @param throwable the exception that the task failed with. - * @return true if the state was successfully changed. - * @throws Error if the throwable was an {@link Error}. - */ - protected boolean setException(Throwable throwable) { - boolean result = sync.setException(Objects.requireNonNull(throwable)); - if (result) { - done(false); - } - - // If it's an Error, we want to make sure it reaches the top of the - // call stack, so we rethrow it. - - // we want to notify the listeners we have with errors as well, as it breaks - // how we work in ES in terms of using assertions - // if (throwable instanceof Error) { - // throw (Error) throwable; - // } - return result; - } - /** * Called when the {@link PlainActionFuture} is completed. The {@code success} boolean indicates if the {@link * PlainActionFuture} was successfully completed (the value is {@code true}). In the cases the {@link PlainActionFuture} @@ -194,16 +155,6 @@ public T actionGet() { } } - @Override - public T actionGet(String timeout) { - return actionGet(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".actionGet.timeout")); - } - - @Override - public T actionGet(long timeoutMillis) { - return actionGet(timeoutMillis, TimeUnit.MILLISECONDS); - } - @Override public T actionGet(TimeValue timeout) { return actionGet(timeout.millis(), TimeUnit.MILLISECONDS); @@ -272,7 +223,7 @@ static final class Sync<V> extends AbstractQueuedSynchronizer { static final int CANCELLED = 4; private V value; - private Throwable exception; + private Exception exception; /* * Acquisition succeeds if the future is done, otherwise it fails. @@ -311,7 +262,7 @@ V get(long nanos) throws TimeoutException, CancellationException, ExecutionExcep } /** - * Blocks until {@link #complete(Object, Throwable, int)} has been + * Blocks until {@link #complete(Object, Exception, int)} has been * successfully called. Throws a {@link CancellationException} if the task * was cancelled, or a {@link ExecutionException} if the task completed with * an error. @@ -390,8 +341,8 @@ boolean set(@Nullable V v) { /** * Transition to the COMPLETED state and set the exception. */ - boolean setException(Throwable t) { - return complete(null, t, COMPLETED); + boolean setException(Exception e) { + return complete(null, e, COMPLETED); } /** @@ -409,16 +360,16 @@ boolean cancel() { * final state ({@link #COMPLETED} or {@link #CANCELLED}). * * @param v the value to set as the result of the computation. - * @param t the exception to set as the result of the computation. + * @param e the exception to set as the result of the computation.
* @param finalState the state to transition to. */ - private boolean complete(@Nullable V v, @Nullable Throwable t, int finalState) { + private boolean complete(@Nullable V v, @Nullable Exception e, int finalState) { boolean doCompletion = compareAndSetState(RUNNING, COMPLETING); if (doCompletion) { // If this thread successfully transitioned to COMPLETING, set the value // and exception and then release to the final state. this.value = v; - this.exception = t; + this.exception = e; releaseShared(finalState); } else if (getState() == COMPLETING) { // If some other thread is currently completing the future, block until diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 9f852f01397da..33d8fbf99f31f 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -10,9 +10,9 @@ import com.carrotsearch.hppc.ObjectCollection; import com.carrotsearch.hppc.ObjectObjectHashMap; -import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.carrotsearch.hppc.procedures.ObjectObjectProcedure; +import com.carrotsearch.hppc.procedures.ObjectProcedure; import org.elasticsearch.common.util.Maps; @@ -84,18 +84,9 @@ public boolean containsKey(Object key) { } @Override + @SuppressWarnings("unchecked") public boolean containsValue(Object value) { - for (ObjectCursor<VType> cursor : map.values()) { - if (Objects.equals(cursor.value, value)) { - return true; - } - } - return false; - } - - @Override - public VType put(KType key, VType value) { - throw new UnsupportedOperationException("modification is not supported"); + return map.values().contains((VType) value); } @Override @@ -103,16 +94,6 @@ public VType remove(Object key) { throw new UnsupportedOperationException("modification is not supported"); } - @Override - public void putAll(Map<? extends KType, ? extends VType> m) { - throw new UnsupportedOperationException("modification is not supported"); - } - - @Override - public void clear() { - throw new UnsupportedOperationException("modification is not supported"); - } - @Override public int size() { return map.size(); @@ -146,35 +127,7 @@ public int hashCode() { return super.hashCode(); } - private static final class ConversionIterator<KType, VType> implements Iterator<Map.Entry<KType, VType>> { - - private final Iterator<ObjectObjectCursor<KType, VType>> original; - - ConversionIterator(Iterator<ObjectObjectCursor<KType, VType>> original) { - this.original = original; - } - - @Override - public boolean hasNext() { - return original.hasNext(); - } - - @Override - public Map.Entry<KType, VType> next() { - final ObjectObjectCursor<KType, VType> obj = original.next(); - if (obj == null) { - return null; - } - return new Maps.ImmutableEntry<>(obj.key, obj.value); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("removal is unsupported"); - } - } - - private static final class EntrySet<KType, VType> extends AbstractSet<Map.Entry<KType, VType>> { + private static class EntrySet<KType, VType> extends AbstractSet<Map.Entry<KType, VType>> { private final ObjectObjectHashMap<KType, VType> map; private EntrySet(ObjectObjectHashMap<KType, VType> map) { @@ -187,13 +140,23 @@ public int size() { } @Override - public void clear() { - throw new UnsupportedOperationException("removal is unsupported"); + public boolean isEmpty() { + return map.isEmpty(); } @Override public Iterator<Map.Entry<KType, VType>> iterator() { - return new ConversionIterator<>(map.iterator()); + return Iterators.map(map.iterator(), c -> new Maps.ImmutableEntry<>(c.key, c.value)); + } + + @Override + public Spliterator<Map.Entry<KType, VType>>
 spliterator() { + return Spliterators.spliterator(iterator(), size(), Spliterator.IMMUTABLE); + } + + @Override + public void forEach(Consumer<? super Map.Entry<KType, VType>> action) { + map.forEach((Consumer<ObjectObjectCursor<KType, VType>>) c -> action.accept(new Maps.ImmutableEntry<>(c.key, c.value))); } @SuppressWarnings("unchecked") @@ -204,70 +167,87 @@ public boolean contains(Object o) { } Map.Entry<?, ?> e = (Map.Entry<?, ?>) o; Object key = e.getKey(); - if (map.containsKey((KType) key) == false) { + Object v = map.get((KType) key); + if (v == null && map.containsKey((KType) key) == false) { return false; } - Object val = map.get((KType) key); - return Objects.equals(val, e.getValue()); + return Objects.equals(v, e.getValue()); } @Override - public boolean remove(Object o) { - throw new UnsupportedOperationException("removal is not supported"); + public String toString() { + return map.toString(); + } + } + + private static class MapObjectCollection<Type> extends AbstractCollection<Type> { + private final ObjectCollection<Type> collection; + + private MapObjectCollection(ObjectCollection<Type> collection) { + this.collection = collection; } @Override - public Spliterator<Map.Entry<KType, VType>> spliterator() { - return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); + public int size() { + return collection.size(); } @Override - public void forEach(Consumer<? super Map.Entry<KType, VType>> action) { - map.forEach((Consumer<ObjectObjectCursor<KType, VType>>) ooCursor -> { - Maps.ImmutableEntry<KType, VType> entry = new Maps.ImmutableEntry<>(ooCursor.key, ooCursor.value); - action.accept(entry); - }); + public boolean isEmpty() { + return collection.isEmpty(); } - } - private static final class KeySet<KType, VType> extends AbstractSet<KType> { + @Override + public Iterator<Type> iterator() { + return Iterators.map(collection.iterator(), c -> c.value); + } + + @Override + public Spliterator<Type> spliterator() { + return Spliterators.spliterator(iterator(), size(), Spliterator.IMMUTABLE); + } - private final ObjectObjectHashMap<KType, VType>.KeysContainer keys; + @Override + public void forEach(Consumer<? super Type> action) { + collection.forEach((ObjectProcedure<Type>) action::accept); + } - private KeySet(ObjectObjectHashMap<KType, VType>.KeysContainer keys) { - this.keys = keys; + @Override + @SuppressWarnings("unchecked") + public boolean contains(Object o) { + return collection.contains((Type) o); } @Override - public Iterator<KType> iterator() { - final Iterator<ObjectCursor<KType>> iterator = keys.iterator(); - return new Iterator<>() { - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public KType next() { - return iterator.next().value; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; + public boolean equals(Object obj) { + return collection.equals(obj); } @Override - public int size() { - return keys.size(); + public int hashCode() { + return collection.hashCode(); + } + + @Override + public String toString() { + return collection.toString(); + } + + @Override + public Object[] toArray() { + return collection.toArray(); } @Override @SuppressWarnings("unchecked") - public boolean contains(Object o) { - return keys.contains((KType) o); + public <T> T[] toArray(T[] a) { + return a.length == 0 ?
 (T[]) collection.toArray(a.getClass().getComponentType()) : super.toArray(a); + } + } + + private static class KeySet<KType, VType> extends MapObjectCollection<KType> implements Set<KType> { + private KeySet(ObjectObjectHashMap<KType, VType>.KeysContainer keys) { + super(keys); } }; @@ -278,17 +258,7 @@ public Set<KType> keySet() { @Override public Collection<VType> values() { - return new AbstractCollection<VType>() { - @Override - public Iterator<VType> iterator() { - return ImmutableOpenMap.iterator(map.values()); - } - - @Override - public int size() { - return map.size(); - } - }; + return new MapObjectCollection<>(map.values()); } @Override @@ -296,26 +266,6 @@ public void forEach(BiConsumer<? super KType, ? super VType> action) { map.forEach((ObjectObjectProcedure<KType, VType>) action::accept); } - static <T> Iterator<T> iterator(ObjectCollection<T> collection) { - final Iterator<ObjectCursor<T>> iterator = collection.iterator(); - return new Iterator<>() { - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public T next() { - return iterator.next().value; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - @Override public String toString() { return map.toString(); @@ -407,9 +357,7 @@ public ImmutableOpenMap<KType, VType> build() { */ public Builder putAllFromMap(Map<KType, VType> map) { maybeCloneMap(); - for (Map.Entry<KType, VType> entry : map.entrySet()) { - this.mutableMap.put(entry.getKey(), entry.getValue()); - } + map.forEach(mutableMap::put); return this; } diff --git a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java index d7c63edac2c94..4b5cef4bbbd45 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java +++ b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java @@ -10,11 +10,13 @@ import org.elasticsearch.core.Nullable; +import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Objects; import java.util.function.BiPredicate; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.IntFunction; import java.util.function.ToIntFunction; @@ -90,35 +92,19 @@ public T next() { } return value; } - } - - public static <T> Iterator<T> forArray(T[] array) { - return new ArrayIterator<>(array); - } - - private static final class ArrayIterator<T> implements Iterator<T> { - - private final T[] array; - private int index; - - private ArrayIterator(T[] array) { - this.array = Objects.requireNonNull(array, "Unable to iterate over a null array"); - } @Override - public boolean hasNext() { - return index < array.length; - } - - @Override - public T next() { - if (index >= array.length) { - throw new NoSuchElementException(); + public void forEachRemaining(Consumer<? super T> action) { + while (index < iterators.length) { + iterators[index++].forEachRemaining(action); } - return array[index++]; } } + public static <T> Iterator<T> forArray(T[] array) { + return Arrays.asList(array).iterator(); + } + public static <T> Iterator<T> forRange(int lowerBoundInclusive, int upperBoundExclusive, IntFunction<? extends T> fn) { assert lowerBoundInclusive <= upperBoundExclusive : lowerBoundInclusive + " vs " + upperBoundExclusive; if (upperBoundExclusive <= lowerBoundInclusive) { @@ -183,6 +169,11 @@ public boolean hasNext() { public U next() { return fn.apply(input.next()); } + + @Override + public void forEachRemaining(Consumer<? super U> action) { + input.forEachRemaining(t -> action.accept(fn.apply(t))); + } } public static <T, U> Iterator<U> flatMap(Iterator<? extends T> input, Function<? super T, Iterator<? extends U>> fn) { diff --git
a/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java b/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java index 1f4ca454b9c8c..1201bab887861 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java @@ -9,8 +9,10 @@ package org.elasticsearch.common.geo; import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.geometry.utils.WellKnownText; +import java.nio.ByteOrder; import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -22,6 +24,7 @@ public class GeometryFormatterFactory { public static final String GEOJSON = "geojson"; public static final String WKT = "wkt"; + public static final String WKB = "wkb"; /** * Returns a formatter by name @@ -38,6 +41,11 @@ public static <T> Function<List<T>, List<Object>> getFormatter(String name, Func geometries.forEach((shape) -> objects.add(WellKnownText.toWKT(toGeometry.apply(shape)))); return objects; }; + case WKB -> geometries -> { + final List<Object> objects = new ArrayList<>(geometries.size()); + geometries.forEach((shape) -> objects.add(WellKnownBinary.toWKB(toGeometry.apply(shape), ByteOrder.LITTLE_ENDIAN))); + return objects; + }; default -> throw new IllegalArgumentException("Unrecognized geometry format [" + name + "]."); }; } diff --git a/server/src/main/java/org/elasticsearch/common/util/BitArray.java b/server/src/main/java/org/elasticsearch/common/util/BitArray.java index 696e81b3beec9..96c00538f07d4 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BitArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BitArray.java @@ -129,6 +129,10 @@ public boolean get(long index) { return (bits.get(wordNum) & bitmask) != 0; } + public long size() { + return bits.size() * (long) Long.BYTES * Byte.SIZE; + } + private static long wordNum(long index) { return index >> 6; } diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 7bfba1ebdb176..7caf570806c0e 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -23,6 +23,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.tasks.Task; +import org.elasticsearch.telemetry.tracing.TraceContext; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -69,7 +70,7 @@ * * */ -public final class ThreadContext implements Writeable { +public final class ThreadContext implements Writeable, TraceContext { public static final String PREFIX = "request.headers"; public static final Setting<Settings> DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", Property.NodeScope); diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index 24df7875f7e3d..f4dbf8115da33 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -26,7 +26,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.telemetry.tracing.SpanId; import
org.elasticsearch.telemetry.tracing.Tracer; import java.util.ArrayList; @@ -91,8 +90,6 @@ public void sendResponse(RestResponse restResponse) { // We're sending a response so we know we won't be needing the request content again and release it httpRequest.release(); - final SpanId spanId = SpanId.forRestRequest(request); - final ArrayList<Releasable> toClose = new ArrayList<>(4); if (HttpUtils.shouldCloseConnection(httpRequest)) { toClose.add(() -> CloseableChannel.closeChannel(httpChannel)); @@ -174,9 +171,9 @@ public void sendResponse(RestResponse restResponse) { addCookies(httpResponse); - tracer.setAttribute(spanId, "http.status_code", restResponse.status().getStatus()); + tracer.setAttribute(request, "http.status_code", restResponse.status().getStatus()); restResponse.getHeaders() - .forEach((key, values) -> tracer.setAttribute(spanId, "http.response.headers." + key, String.join("; ", values))); + .forEach((key, values) -> tracer.setAttribute(request, "http.response.headers." + key, String.join("; ", values))); ActionListener<Void> listener = ActionListener.releasing(Releasables.wrap(toClose)); if (httpLogger != null) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index 765cc256d84b1..f4edb8b1d4039 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -56,13 +56,11 @@ public record IndexVersion(int id, Version luceneVersion) implements VersionId<IndexVersion> private static class CurrentHolder { private static final IndexVersion CURRENT = findCurrent(); - // finds the pluggable current version, or uses the given fallback + // finds the pluggable current version private static IndexVersion findCurrent() { - var versionExtension = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class), () -> null); - if (versionExtension == null) { - return IndexVersions.LATEST_DEFINED; - } - var version = versionExtension.getCurrentIndexVersion(IndexVersions.LATEST_DEFINED); + var version = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class)) + .map(e -> e.getCurrentIndexVersion(IndexVersions.LATEST_DEFINED)) + .orElse(IndexVersions.LATEST_DEFINED); assert version.onOrAfter(IndexVersions.LATEST_DEFINED); assert version.luceneVersion.equals(Version.LATEST) diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 2e3bc0935c6e3..a7d93ec7e7d80 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -62,7 +62,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { this.stats = new NodeStatsCache(TimeValue.timeValueMinutes(1)); metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.get.total", + "es.indices.get.total", "Total number of get operations", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getCount()) @@ -71,7 +71,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.get.time", + "es.indices.get.time", "Time in milliseconds spent performing get operations.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getTimeInMillis()) @@ -80,7 +80,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.search.fetch.total", + "es.indices.search.fetch.total", "Total number of fetch operations.", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchCount()) @@ -89,7 +89,7 @@
metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.search.fetch.time", + "es.indices.search.fetch.time", "Time in milliseconds spent performing fetch operations.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchTimeInMillis()) @@ -98,7 +98,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.merge.total", + "es.indices.merge.total", "Total number of merge operations.", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotal()) @@ -107,7 +107,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.merge.time", + "es.indices.merge.time", "Time in milliseconds spent performing merge operations.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotalTimeInMillis()) @@ -116,7 +116,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.operations", + "es.translog.operations.count", "Number of transaction log operations.", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().estimatedNumberOfOperations()) @@ -125,7 +125,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.size", + "es.translog.size", "Size, in bytes, of the transaction log.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getTranslogSizeInBytes()) @@ -134,7 +134,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.uncommitted_operations", + "es.translog.uncommitted_operations.count", "Number of uncommitted transaction log operations.", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedOperations()) @@ -143,7 +143,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.uncommitted_size", + "es.translog.uncommitted_operations.size", "Size, in bytes, of uncommitted transaction log operations.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedSizeInBytes()) @@ -152,7 +152,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.translog.earliest_last_modified_age", + "es.translog.earliest_last_modified.time", "Earliest last modified age for the transaction log.", "time", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getEarliestLastModifiedAge()) @@ -161,7 +161,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.transport.rx_size", + "es.transport.rx.size", "Size, in bytes, of RX packets received by the node during internal cluster communication.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getRxSize().getBytes()) @@ -170,7 +170,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.transport.tx_size", + "es.transport.tx.size", "Size, in bytes, of TX packets sent by the node during internal cluster 
communication.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getTxSize().getBytes()) @@ -179,7 +179,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.jvm.mem.pools.young.used", + "es.jvm.mem.pools.young.size", "Memory, in bytes, used by the young generation heap.", "bytes", () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.YOUNG)) @@ -188,7 +188,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.jvm.mem.pools.survivor.used", + "es.jvm.mem.pools.survivor.size", "Memory, in bytes, used by the survivor space.", "bytes", () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.SURVIVOR)) @@ -197,7 +197,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.jvm.mem.pools.old.used", + "es.jvm.mem.pools.old.size", "Memory, in bytes, used by the old generation heap.", "bytes", () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.OLD)) @@ -206,7 +206,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.fs.io_stats.io_time.total", + "es.fs.io_stats.time.total", "The total time in millis spent performing I/O operations across all devices used by Elasticsearch.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getFs().getIoStats().getTotalIOTimeMillis()) @@ -215,7 +215,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.docs.total", + "es.indexing.docs.total", "Total number of indexed documents", "documents", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexCount()) @@ -224,7 +224,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.indexing.docs.current", + "es.indexing.docs.count", "Current number of indexing documents", "documents", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexCurrent()) @@ -233,7 +233,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.failed.total", + "es.indices.indexing.failed.total", "Total number of failed indexing operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexFailedCount()) @@ -242,7 +242,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.deletion.docs.total", + "es.indices.deletion.docs.total", "Total number of deleted documents", "documents", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteCount()) @@ -251,7 +251,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.deletion.docs.current", + "es.indices.deletion.docs.count", "Current number of deleting documents", "documents", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteCurrent()) @@ -260,7 +260,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( 
registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.time", + "es.indices.indexing.time", "Total indices indexing time", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexTime().millis()) @@ -269,7 +269,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.deletion.time", + "es.indices.deletion.time", "Total indices deletion time", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteTime().millis()) @@ -278,7 +278,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.throttle.time", + "es.indices.throttle.time", "Total indices throttle time", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getThrottleTime().millis()) @@ -287,7 +287,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.noop.total", + "es.indices.noop.total", "Total number of noop shard operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getNoopUpdateCount()) @@ -296,7 +296,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.coordinating_operations.memory.size.total", + "es.indexing.coordinating_operations.size", "Total number of memory bytes consumed by coordinating operations", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalCoordinatingBytes()) @@ -305,7 +305,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.coordinating_operations.count.total", + "es.indexing.coordinating_operations.total", "Total number of coordinating operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalCoordinatingOps()) @@ -314,7 +314,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.indexing.coordinating_operations.memory.size.current", + "es.indexing.coordinating_operations.current.size", "Current number of memory bytes consumed by coordinating operations", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentCoordinatingBytes()) @@ -323,7 +323,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.indexing.coordinating_operations.count.current", + "es.indexing.coordinating_operations.count", "Current number of coordinating operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentCoordinatingOps()) @@ -332,7 +332,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.coordinating_operations.rejections.total", + "es.indexing.coordinating_operations.rejections.total", "Total number of coordinating operations rejections", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCoordinatingRejections()) @@ -341,7 +341,7 @@ private void registerAsyncMetrics(MeterRegistry registry) {
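These renames follow a consistent scheme: ever-growing totals are registered as async counters ending in .total (or .size for byte totals), while point-in-time values are gauges named .count, .current.size, and the like, all under the shorter es. namespace. A sketch of the two registration shapes, assuming the telemetry MeterRegistry API used above and with invented LongSupplier stand-ins for the NodeStats plumbing:

import org.elasticsearch.telemetry.metric.LongWithAttributes;
import org.elasticsearch.telemetry.metric.MeterRegistry;

import java.util.function.LongSupplier;

class ExampleNodeMetrics {
    static void register(MeterRegistry registry, LongSupplier totalMerges, LongSupplier inFlightDocs) {
        // Monotonic total -> async counter with a ".total" suffix.
        registry.registerLongAsyncCounter(
            "es.indices.merge.total",
            "Total number of merge operations.",
            "operation",
            () -> new LongWithAttributes(totalMerges.getAsLong())
        );
        // Point-in-time value -> gauge with a ".count" suffix.
        registry.registerLongGauge(
            "es.indexing.docs.count",
            "Current number of indexing documents",
            "documents",
            () -> new LongWithAttributes(inFlightDocs.getAsLong())
        );
    }
}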
metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.primary_operations.memory.size.total", + "es.indexing.primary_operations.size", "Total number of memory bytes consumed by primary operations", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalPrimaryBytes()) @@ -350,7 +350,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.primary_operations.count.total", + "es.indexing.primary_operations.total", "Total number of primary operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalPrimaryOps()) @@ -359,7 +359,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.indexing.primary_operations.memory.size.current", + "es.indexing.primary_operations.current.size", "Current number of memory bytes consumed by primary operations", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentPrimaryBytes()) @@ -368,7 +368,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.indexing.primary_operations.count.current", + "es.indexing.primary_operations.count", "Current number of primary operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentPrimaryOps()) @@ -377,7 +377,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.indexing.primary_operations.rejections.total", + "es.indexing.primary_operations.rejections.total", "Total number of primary operations rejections", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getPrimaryRejections()) @@ -386,7 +386,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.indexing.memory.limit.current", + "es.indexing.memory.limit.size", "Current memory limit for primary and coordinating operations", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getMemoryLimit()) diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 7b9b71ae4df27..0de5657c0cb1a 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -202,6 +202,7 @@ import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -384,6 +385,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr ); logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments())); + logger.info("Default Locale [{}]", Locale.getDefault()); if (Build.current().isProductionRelease() == false) { logger.warn( "version [{}] is a pre-release version of Elasticsearch and is not suitable for production", diff --git a/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java b/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java index
7dfb64c989ea2..5cf5f1b92e472 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java +++ b/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java @@ -8,9 +8,10 @@ package org.elasticsearch.plugins; -import java.util.Locale; +import org.elasticsearch.core.Strings; + +import java.util.Optional; import java.util.ServiceLoader; -import java.util.function.Supplier; /** * A utility for loading SPI extensions. @@ -20,8 +21,7 @@ public class ExtensionLoader { /** * Loads a single SPI extension. * - * There should be no more than one extension found. If no service providers - * are found, the supplied fallback is used. + * There should be no more than one extension found. * * Note: A ServiceLoader is needed rather than the service class because ServiceLoaders * must be loaded by a module with the {@code uses} declaration. Since this * service classes it may load. Thus, the caller must load the ServiceLoader. * * @param loader a service loader instance to find the singleton extension in - * @param fallback a supplier for an instance if no extensions are found * @return an instance of the extension * @param <T> the SPI extension type */ - public static <T> T loadSingleton(ServiceLoader<T> loader, Supplier<T> fallback) { - var extensions = loader.stream().toList(); - if (extensions.size() > 1) { + public static <T> Optional<T> loadSingleton(ServiceLoader<T> loader) { + var extensions = loader.iterator(); + if (extensions.hasNext() == false) { + return Optional.empty(); + } + var ext = extensions.next(); + if (extensions.hasNext()) { // It would be really nice to give the actual extension class here directly, but that would require passing it // in effectively twice in the call site, once to ServiceLoader, and then to this method directly as well. // It's annoying that ServiceLoader hangs onto the service class, but does not expose it. It does at least
// print the service class from its toString, which is better than nothing - throw new IllegalStateException(String.format(Locale.ROOT, "More than one extension found for %s", loader)); - } else if (extensions.isEmpty()) { - return fallback.get(); + throw new IllegalStateException(Strings.format("More than one extension found for %s", loader)); } - return extensions.get(0).get(); + return Optional.of(ext); } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 4fe56e897054f..09eb83d109e3e 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -24,6 +24,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContent; @@ -46,7 +47,7 @@ import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.elasticsearch.core.TimeValue.parseTimeValue; -public class RestRequest implements ToXContent.Params { +public class RestRequest implements ToXContent.Params, Traceable { public static final String RESPONSE_RESTRICTED = "responseRestricted"; // tchar pattern as defined by RFC7230 section 3.2.6 @@ -626,6 +627,11 @@ public void markResponseRestricted(String restriction) { consumedParams.add(RESPONSE_RESTRICTED); } + @Override + public String getSpanId() { + return "rest-" + getRequestId(); + } + public static class MediaTypeHeaderException extends RuntimeException { private final String message; diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index c232e1a30c553..a881b2497b26c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -142,7 +142,7 @@ public static MultiSearchRequest parseRequest( searchRequest.source(new SearchSourceBuilder().parseXContent(parser, false, searchUsageHolder)); RestSearchAction.validateSearchRequest(restRequest, searchRequest); if (searchRequest.pointInTimeBuilder() != null) { - RestSearchAction.preparePointInTime(searchRequest, restRequest, namedWriteableRegistry); + RestSearchAction.preparePointInTime(searchRequest, restRequest); } else { searchRequest.setCcsMinimizeRoundtrips( restRequest.paramAsBoolean("ccs_minimize_roundtrips", searchRequest.isCcsMinimizeRoundtrips()) diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 41102a3568e30..711aec182525e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.search.SearchContextId; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; import
org.elasticsearch.action.support.IndicesOptions; @@ -50,7 +49,6 @@ import java.util.function.IntConsumer; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.action.search.SearchRequest.DEFAULT_INDICES_OPTIONS; import static org.elasticsearch.core.TimeValue.parseTimeValue; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -220,7 +218,7 @@ public static void parseSearchRequest( validateSearchRequest(request, searchRequest); if (searchRequest.pointInTimeBuilder() != null) { - preparePointInTime(searchRequest, request, namedWriteableRegistry); + preparePointInTime(searchRequest, request); } else { searchRequest.setCcsMinimizeRoundtrips( request.paramAsBoolean("ccs_minimize_roundtrips", searchRequest.isCcsMinimizeRoundtrips()) @@ -373,44 +371,14 @@ static SuggestBuilder parseSuggestUrlParameters(RestRequest request) { return null; } - static void preparePointInTime(SearchRequest request, RestRequest restRequest, NamedWriteableRegistry namedWriteableRegistry) { + static void preparePointInTime(SearchRequest request, RestRequest restRequest) { assert request.pointInTimeBuilder() != null; ActionRequestValidationException validationException = null; - if (request.indices().length > 0) { - validationException = addValidationError( - "[indices] cannot be used with point in time. Do not specify any index with point in time.", - validationException - ); - } - if (request.indicesOptions().equals(DEFAULT_INDICES_OPTIONS) == false) { - validationException = addValidationError("[indicesOptions] cannot be used with point in time", validationException); - } - if (request.routing() != null) { - validationException = addValidationError("[routing] cannot be used with point in time", validationException); - } - if (request.preference() != null) { - validationException = addValidationError("[preference] cannot be used with point in time", validationException); - } if (restRequest.paramAsBoolean("ccs_minimize_roundtrips", false)) { validationException = addValidationError("[ccs_minimize_roundtrips] cannot be used with point in time", validationException); request.setCcsMinimizeRoundtrips(false); } ExceptionsHelper.reThrowIfNotNull(validationException); - - final IndicesOptions indicesOptions = request.indicesOptions(); - final IndicesOptions stricterIndicesOptions = IndicesOptions.fromOptions( - indicesOptions.ignoreUnavailable(), - indicesOptions.allowNoIndices(), - false, - false, - false, - true, - true, - indicesOptions.ignoreThrottled() - ); - request.indicesOptions(stricterIndicesOptions); - final SearchContextId searchContextId = request.pointInTimeBuilder().getSearchContextId(namedWriteableRegistry); - request.indices(searchContextId.getActualIndices()); } /** diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 9e59bfda96d19..8a03c7e9f08ba 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -115,7 +115,6 @@ import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.tasks.TaskCancelledException; -import org.elasticsearch.telemetry.tracing.SpanId; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.Scheduler; import 
org.elasticsearch.threadpool.Scheduler.Cancellable; @@ -493,7 +492,7 @@ public void executeDfsPhase(ShardSearchRequest request, SearchShardTask task, Ac private DfsSearchResult executeDfsPhase(ShardSearchRequest request, SearchShardTask task) throws IOException { ReaderContext readerContext = createOrGetReaderContext(request); try (@SuppressWarnings("unused") // withScope call is necessary to instrument search execution - Releasable scope = tracer.withScope(SpanId.forTask(task)); + Releasable scope = tracer.withScope(task); Releasable ignored = readerContext.markAsUsed(getKeepAlive(request)); SearchContext context = createContext(readerContext, request, task, ResultsType.DFS, false) ) { @@ -665,9 +664,8 @@ private static void runAsync( */ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchShardTask task) throws Exception { final ReaderContext readerContext = createOrGetReaderContext(request); - SpanId spanId = SpanId.forTask(task); try ( - Releasable scope = tracer.withScope(spanId); + Releasable scope = tracer.withScope(task); Releasable ignored = readerContext.markAsUsed(getKeepAlive(request)); SearchContext context = createContext(readerContext, request, task, ResultsType.QUERY, true) ) { @@ -680,7 +678,7 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchSh } afterQueryTime = executor.success(); } finally { - tracer.stopTrace(spanId); + tracer.stopTrace(task); } if (request.numberOfShards() == 1 && (request.source() == null || request.source().rankBuilder() == null)) { // we already have query results, but we can run fetch at the same time @@ -711,7 +709,7 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchSh private QueryFetchSearchResult executeFetchPhase(ReaderContext reader, SearchContext context, long afterQueryTime) { try ( - Releasable scope = tracer.withScope(SpanId.forTask(context.getTask())); + Releasable scope = tracer.withScope(context.getTask()); SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime) ) { fetchPhase.execute(context, shortcutDocIdsToLoad(context)); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 01988003f4dd0..18ae708d8fec3 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -182,6 +182,10 @@ public static long computeWaitForCheckpoint(Map indexToWaitForCh } public ShardSearchRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFilter) { + this(shardId, nowInMillis, aliasFilter, null); + } + + public ShardSearchRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFilter, String clusterAlias) { this( OriginalIndices.NONE, shardId, @@ -195,7 +199,7 @@ public ShardSearchRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFi true, null, nowInMillis, - null, + clusterAlias, null, null, SequenceNumbers.UNASSIGNED_SEQ_NO, diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 3726ba265e433..83ee08574df4e 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; @@ -21,7 +22,7 @@ /** * Current task information */ -public class Task { +public class Task implements Traceable { /** * The request header to mark tasks with specific ids @@ -265,4 +266,9 @@ public TaskResult result(DiscoveryNode node, ActionResponse response) throws IOE throw new IllegalStateException("response has to implement ToXContent to be able to store the results"); } } + + @Override + public String getSpanId() { + return "task-" + getId(); + } } diff --git a/server/src/main/java/org/elasticsearch/telemetry/tracing/SpanId.java b/server/src/main/java/org/elasticsearch/telemetry/tracing/SpanId.java deleted file mode 100644 index 8a22102baadf9..0000000000000 --- a/server/src/main/java/org/elasticsearch/telemetry/tracing/SpanId.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.telemetry.tracing; - -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.tasks.Task; - -import java.util.Objects; - -public class SpanId { - private final String rawId; - - private SpanId(String rawId) { - this.rawId = Objects.requireNonNull(rawId); - } - - public String getRawId() { - return rawId; - } - - @Override - public String toString() { - return "SpanId[" + rawId + "]"; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SpanId spanId = (SpanId) o; - return rawId.equals(spanId.rawId); - } - - @Override - public int hashCode() { - return Objects.hash(rawId); - } - - public static SpanId forTask(Task task) { - return new SpanId("task-" + task.getId()); - } - - public static SpanId forRestRequest(RestRequest restRequest) { - return new SpanId("rest-" + restRequest.getRequestId()); - } - - public static SpanId forBareString(String rawId) { - return new SpanId(rawId); - } -} diff --git a/server/src/main/java/org/elasticsearch/telemetry/tracing/TraceContext.java b/server/src/main/java/org/elasticsearch/telemetry/tracing/TraceContext.java new file mode 100644 index 0000000000000..197b4f96acd5b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/tracing/TraceContext.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.tracing; + +/** + * Required methods from ThreadContext for Tracer + */ +public interface TraceContext { + /** + * Returns a transient header object or null if there is no header for the given key + */ + <T> T getTransient(String key); + + /** + * Puts a transient header object into this context + */ + void putTransient(String key, Object value); + + /** + * Returns the header for the given key or null if not present + */ + String getHeader(String key); + + /** + * Puts a header into the context + */ + void putHeader(String key, String value); +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/tracing/Traceable.java b/server/src/main/java/org/elasticsearch/telemetry/tracing/Traceable.java new file mode 100644 index 0000000000000..64c8635d75dd8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/tracing/Traceable.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.tracing; + +/** + * A class that can be traced using the telemetry tracing API + */ +public interface Traceable { + /** + * A consistent id for the span. Should be structured "[short-name]-[unique-id]", e.g. "request-abc1234" + */ + String getSpanId(); +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java index f54857091b778..6f2c98dda4e2b 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java @@ -8,10 +8,7 @@ package org.elasticsearch.telemetry.tracing; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Releasable; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.tasks.Task; import java.util.Map; @@ -37,27 +34,13 @@ public interface Tracer { /** * Called when a span starts. - * @param threadContext the current context. Required for tracing parent/child span activity. - * @param spanId a unique identifier for the activity, and will not be sent to the tracing system. Add the ID - * to the attributes if it is important + * @param traceContext the current context. Required for tracing parent/child span activity. + * @param traceable provides a unique identifier for the activity, and will not be sent to the tracing system. Add the ID + * to the attributes if it is important * @param name the name of the span. Used to filter out spans, but also sent to the tracing system * @param attributes arbitrary key/value data for the span. Sent to the tracing system */ - void startTrace(ThreadContext threadContext, SpanId spanId, String name, Map<String, Object> attributes); - - /** - * @see Tracer#startTrace(ThreadContext, SpanId, String, Map) - */ - default void startTrace(ThreadContext threadContext, Task task, String name, Map<String, Object> attributes) { - startTrace(threadContext, SpanId.forTask(task), name, attributes); - } - - /** - * @see Tracer#startTrace(ThreadContext, SpanId, String, Map) - */ - default void startTrace(ThreadContext threadContext, RestRequest restRequest, String name, Map<String, Object> attributes) { - startTrace(threadContext, SpanId.forRestRequest(restRequest), name, attributes); - } + void startTrace(TraceContext traceContext, Traceable traceable, String name, Map<String, Object> attributes); /** * Called when a span starts. This version of the method relies on context to assign the span a parent. @@ -67,23 +50,9 @@ default void startTrace(ThreadContext threadContext, RestRequest restRequest, St /** * Called when a span ends. - * @param spanId an identifier for the span - */ - void stopTrace(SpanId spanId); - - /** - * @see Tracer#stopTrace(SpanId) - */ - default void stopTrace(Task task) { - stopTrace(SpanId.forTask(task)); - } - - /** - * @see Tracer#stopTrace(SpanId) + * @param traceable provides an identifier for the span */ - default void stopTrace(RestRequest restRequest) { - stopTrace(SpanId.forRestRequest(restRequest)); - } + void stopTrace(Traceable traceable); /** * Called when a span ends. This version of the method relies on context to select the span to stop. @@ -94,58 +63,51 @@ default void stopTrace(RestRequest restRequest) { * Some tracing implementations support the concept of "events" within a span, marking a point in time during the span * when something interesting happened. If the tracing implementation doesn't support events, then nothing will be recorded. * This should only be called when a trace has already been started on the {@code traceable}. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param eventName the event that happened. This should be something meaningful to people reviewing the data, for example * "send response", "finished processing", "validated request", etc. */ - void addEvent(SpanId spanId, String eventName); + void addEvent(Traceable traceable, String eventName); /** * If an exception occurs during a span, you can add data about the exception to the span where the exception occurred. * This should only be called when a span has been started, otherwise it has no effect. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param throwable the exception that occurred. */ - void addError(SpanId spanId, Throwable throwable); - - /** - * @see Tracer#addError(SpanId, Throwable) - */ - default void addError(RestRequest restRequest, Throwable throwable) { - addError(SpanId.forRestRequest(restRequest), throwable); - } + void addError(Traceable traceable, Throwable throwable); /** * Adds a boolean attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, boolean value); + void setAttribute(Traceable traceable, String key, boolean value); /** * Adds a double attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, double value); + void setAttribute(Traceable traceable, String key, double value); /** * Adds a long attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, long value); + void setAttribute(Traceable traceable, String key, long value); /** * Adds a String attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, String value); + void setAttribute(Traceable traceable, String key, String value); /** * Usually you won't need to worry about scopes when using tracing. However, @@ -172,10 +134,10 @@ default void addError(RestRequest restRequest, Throwable throwable) { *

Nonetheless, it is possible to manually use scope where more detail is needed by * explicitly opening a scope via the `Tracer`. * - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @return a scope. You MUST close it when you are finished with it. */ - Releasable withScope(SpanId spanId); + Releasable withScope(Traceable traceable); /** * A Tracer implementation that does nothing. This is used when no tracer is configured, @@ -183,52 +145,37 @@ default void addError(RestRequest restRequest, Throwable throwable) { */ Tracer NOOP = new Tracer() { @Override - public void startTrace(ThreadContext threadContext, SpanId spanId, String name, Map attributes) {} - - @Override - public void startTrace(ThreadContext threadContext, Task task, String name, Map attributes) {} - - @Override - public void startTrace(ThreadContext threadContext, RestRequest restRequest, String name, Map attributes) {} + public void startTrace(TraceContext traceContext, Traceable traceable, String name, Map attributes) {} @Override public void startTrace(String name, Map attributes) {} @Override - public void stopTrace(SpanId spanId) {} - - @Override - public void stopTrace(Task task) {} - - @Override - public void stopTrace(RestRequest restRequest) {} + public void stopTrace(Traceable traceable) {} @Override public void stopTrace() {} @Override - public void addEvent(SpanId spanId, String eventName) {} - - @Override - public void addError(SpanId spanId, Throwable throwable) {} + public void addEvent(Traceable traceable, String eventName) {} @Override - public void addError(RestRequest restRequest, Throwable throwable) {} + public void addError(Traceable traceable, Throwable throwable) {} @Override - public void setAttribute(SpanId spanId, String key, boolean value) {} + public void setAttribute(Traceable traceable, String key, boolean value) {} @Override - public void setAttribute(SpanId spanId, String key, double value) {} + public void setAttribute(Traceable traceable, String key, double value) {} @Override - public void setAttribute(SpanId spanId, String key, long value) {} + public void setAttribute(Traceable traceable, String key, long value) {} @Override - public void setAttribute(SpanId spanId, String key, String value) {} + public void setAttribute(Traceable traceable, String key, String value) {} @Override - public Releasable withScope(SpanId spanId) { + public Releasable withScope(Traceable traceable) { return () -> {}; } }; diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java index 32a5e196c3a0b..064e868970ef5 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java @@ -29,7 +29,7 @@ public RemoteClusterCredentialsManager(Settings settings) { updateClusterCredentials(settings); } - public void updateClusterCredentials(Settings settings) { + public final void updateClusterCredentials(Settings settings) { clusterCredentials = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings); logger.debug( () -> Strings.format( diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java index dfafcd0662290..f26cdfe2bea6c 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java @@ -67,9 +67,8 @@ void runTest() { request, future ); - assertTrue(future.isDone()); - final var response = future.actionGet(0L); + final var response = future.result(); assertThat(response.getFailures(), empty()); assertThat(response.getStoreStatuses(), anEmptyMap()); assertThat(shardsWithFailures, empty()); @@ -132,8 +131,7 @@ void runTest() { listExpected = false; assertFalse(future.isDone()); deterministicTaskQueue.runAllTasks(); - assertTrue(future.isDone()); - expectThrows(TaskCancelledException.class, () -> future.actionGet(0L)); + expectThrows(TaskCancelledException.class, future::result); } }); } @@ -153,9 +151,8 @@ void runTest() { assertFalse(future.isDone()); failOneRequest = true; deterministicTaskQueue.runAllTasks(); - assertTrue(future.isDone()); assertFalse(failOneRequest); - assertEquals("simulated", expectThrows(ElasticsearchException.class, () -> future.actionGet(0L)).getMessage()); + assertEquals("simulated", expectThrows(ElasticsearchException.class, future::result).getMessage()); } }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java index 90ac90738837d..32091780484fa 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java @@ -85,5 +85,11 @@ public void testEncode() { assertThat(node3.getNode(), equalTo("node_3")); assertThat(node3.getSearchContextId().getId(), equalTo(42L)); assertThat(node3.getSearchContextId().getSessionId(), equalTo("c")); + + final String[] indices = SearchContextId.decodeIndices(id); + assertThat(indices.length, equalTo(3)); + assertThat(indices[0], equalTo("cluster_x:idx")); + assertThat(indices[1], equalTo("cluster_y:idy")); + assertThat(indices[2], equalTo("idy")); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index 0c8496081ff19..8c0ffeabf0ea6 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -457,6 +457,42 @@ public void testValidate() throws IOException { assertEquals(1, validationErrors.validationErrors().size()); assertEquals("[rank] requires [explain] is [false]", validationErrors.validationErrors().get(0)); } + { + SearchRequest searchRequest = new SearchRequest("test").source( + new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("")) + ); + ActionRequestValidationException validationErrors = searchRequest.validate(); + assertNotNull(validationErrors); + assertEquals(1, validationErrors.validationErrors().size()); + assertEquals( + "[indices] cannot be used with point in time. 
Do not specify any index with point in time.", + validationErrors.validationErrors().get(0) + ); + } + { + SearchRequest searchRequest = new SearchRequest().indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED) + .source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(""))); + ActionRequestValidationException validationErrors = searchRequest.validate(); + assertNotNull(validationErrors); + assertEquals(1, validationErrors.validationErrors().size()); + assertEquals("[indicesOptions] cannot be used with point in time", validationErrors.validationErrors().get(0)); + } + { + SearchRequest searchRequest = new SearchRequest().routing("route1") + .source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(""))); + ActionRequestValidationException validationErrors = searchRequest.validate(); + assertNotNull(validationErrors); + assertEquals(1, validationErrors.validationErrors().size()); + assertEquals("[routing] cannot be used with point in time", validationErrors.validationErrors().get(0)); + } + { + SearchRequest searchRequest = new SearchRequest().preference("pref1") + .source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(""))); + ActionRequestValidationException validationErrors = searchRequest.validate(); + assertNotNull(validationErrors); + assertEquals(1, validationErrors.validationErrors().size()); + assertEquals("[preference] cannot be used with point in time", validationErrors.validationErrors().get(0)); + } } public void testCopyConstructor() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java index abc482a34a070..2ca914eb23c61 100644 --- a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java @@ -34,13 +34,11 @@ public void onResponse(Object value) { // test all possible methods that can be interrupted final Runnable runnable = () -> { - final int method = randomIntBetween(0, 4); + final int method = randomIntBetween(0, 2); switch (method) { case 0 -> future.actionGet(); - case 1 -> future.actionGet("30s"); - case 2 -> future.actionGet(30000); - case 3 -> future.actionGet(TimeValue.timeValueSeconds(30)); - case 4 -> future.actionGet(30, TimeUnit.SECONDS); + case 1 -> future.actionGet(TimeValue.timeValueSeconds(30)); + case 2 -> future.actionGet(30, TimeUnit.SECONDS); default -> throw new AssertionError(method); } }; diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index a787a50798e05..8bda62b91bc7e 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -305,7 +305,7 @@ public BaseBroadcastResponse executeAndAssertImmediateResponse( ) { PlainActionFuture response = new PlainActionFuture<>(); ActionTestUtils.execute(broadcastAction, null, request, response); - return response.actionGet("5s"); + return response.actionGet(5, TimeUnit.SECONDS); } private void assertBroadcastResponse(int total, int successful, int failed, BaseBroadcastResponse response, Class exceptionClass) { diff --git 
a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index 409023afc4576..2a2986d974b0d 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -495,7 +495,7 @@ public void testTraceStopped() { executeRequest(Settings.EMPTY, "request-host"); - verify(tracer).setAttribute(argThat(id -> id.getRawId().startsWith("rest-")), eq("http.status_code"), eq(200L)); + verify(tracer).setAttribute(argThat(id -> id.getSpanId().startsWith("rest-")), eq("http.status_code"), eq(200L)); verify(tracer).stopTrace(any(RestRequest.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java index aa4dec379f085..8627a236d6401 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java @@ -11,15 +11,19 @@ import org.apache.lucene.tests.geo.GeoTestUtil; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.SimpleFeatureFactory; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.script.ScriptCompiler; -import org.hamcrest.Matchers; import java.io.IOException; +import java.nio.ByteOrder; import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class GeoPointFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { @@ -36,31 +40,50 @@ public void testFetchSourceValue() throws IOException { Map otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0)); String wktPoint = "POINT (42.0 27.1)"; String otherWktPoint = "POINT (30.0 50.0)"; + byte[] wkbPoint = WellKnownBinary.toWKB(new Point(42.0, 27.1), ByteOrder.LITTLE_ENDIAN); + byte[] otherWkbPoint = WellKnownBinary.toWKB(new Point(30.0, 50.0), ByteOrder.LITTLE_ENDIAN); // Test a single point in [lon, lat] array format. Object sourceValue = List.of(42.0, 27.1); assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + List wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); // Test a single point in "lat, lon" string format. sourceValue = "27.1,42.0"; assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); // Test a list of points in [lon, lat] array format. 
sourceValue = List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)); assertEquals(List.of(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbPoint)); + assertThat(wkb.get(1), equalTo(otherWkbPoint)); // Test a list of points in [lat,lon] array format with one malformed sourceValue = List.of(List.of(42.0, 27.1), List.of("a", "b"), List.of(30.0, 50.0)); assertEquals(List.of(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbPoint)); + assertThat(wkb.get(1), equalTo(otherWkbPoint)); // Test a single point in well-known text format. sourceValue = "POINT (42.0 27.1)"; assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); // Test a malformed value sourceValue = "malformed"; @@ -71,9 +94,13 @@ public void testFetchSourceValue() throws IOException { if (ignoreMalformed) { assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); } else { assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt")); + assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb")); } // test single point in GeoJSON format @@ -110,13 +137,13 @@ public void testFetchVectorTile() throws IOException { final double lat = GeoTestUtil.nextLatitude(); final double lon = GeoTestUtil.nextLongitude(); List sourceValue = fetchSourceValue(mapper, List.of(lon, lat), mvtString); - assertThat(sourceValue.size(), Matchers.equalTo(1)); - assertThat(sourceValue.get(0), Matchers.equalTo(featureFactory.point(lon, lat))); + assertThat(sourceValue.size(), equalTo(1)); + assertThat(sourceValue.get(0), equalTo(featureFactory.point(lon, lat))); geoPoints.add(new GeoPoint(lat, lon)); values.add(List.of(lon, lat)); } List sourceValue = fetchSourceValue(mapper, values, mvtString); - assertThat(sourceValue.size(), Matchers.equalTo(1)); - assertThat(sourceValue.get(0), Matchers.equalTo(featureFactory.points(geoPoints))); + assertThat(sourceValue.size(), equalTo(1)); + assertThat(sourceValue.get(0), equalTo(featureFactory.points(geoPoints))); } } diff --git a/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java index 35f65ebedf9b9..f9647c27e0acb 100644 --- a/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java @@ -20,10 +20,12 @@ import java.util.HashMap; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.ServiceLoader; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -68,21 +70,16 @@ public int getValue() { """, name, value); } - public void testNoProviderNullFallback() { - TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class), () -> null); - assertThat(service, nullValue()); - } - public void testNoProvider() { - TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class), () -> () -> 2); - assertThat(service, not(nullValue())); - assertThat(service.getValue(), equalTo(2)); + Optional service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class)); + assertThat(service.isEmpty(), is(true)); } public void testOneProvider() throws Exception { Map sources = Map.of("p.FooService", defineProvider("FooService", 1)); try (var loader = buildProviderJar(sources)) { - TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader()), () -> null); + TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader())) + .orElseThrow(AssertionError::new); assertThat(service, not(nullValue())); assertThat(service.getValue(), equalTo(1)); } @@ -98,7 +95,7 @@ public void testManyProviders() throws Exception { try (var loader = buildProviderJar(sources)) { var e = expectThrows( IllegalStateException.class, - () -> ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader()), () -> null) + () -> ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader())) ); assertThat(e.getMessage(), containsString("More than one extension found")); assertThat(e.getMessage(), containsString("TestService")); diff --git a/test/fixtures/testcontainer-utils/build.gradle b/test/fixtures/testcontainer-utils/build.gradle index 88cc17de04a73..3766722abcd65 100644 --- a/test/fixtures/testcontainer-utils/build.gradle +++ b/test/fixtures/testcontainer-utils/build.gradle @@ -12,6 +12,7 @@ dependencies { api "com.github.docker-java:docker-java-api:${versions.dockerJava}" implementation "org.slf4j:slf4j-api:${versions.slf4j}" runtimeOnly "com.github.docker-java:docker-java-transport-zerodep:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-transport:${versions.dockerJava}" runtimeOnly "com.github.docker-java:docker-java-core:${versions.dockerJava}" runtimeOnly "org.apache.commons:commons-compress:${versions.commonsCompress}" runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 0a0592b5a01f2..5f6e50a7c83e0 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -702,5 +702,11 @@ public void addWithoutBreaking(long bytes) { public long getUsed() { return used.get(); } + + @Override + public String toString() { + long u = used.get(); + return "LimitedBreaker[" + u + "/" + max.getBytes() + "][" + ByteSizeValue.ofBytes(u) + "/" + max + "]"; + } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index f3522cf43b73d..770c56f9c5952 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -223,6 +223,34 @@ public static void testFromXContent( BiConsumer assertEqualsConsumer, boolean assertToXContentEquivalence, ToXContent.Params toXContentParams + ) throws IOException { + testFromXContent( + numberOfTestRuns, + instanceSupplier, + supportsUnknownFields, + shuffleFieldsExceptions, + randomFieldsExcludeFilter, + createParserFunction, + fromXContent, + assertEqualsConsumer, + assertToXContentEquivalence, + toXContentParams, + t -> {} + ); + } + + public static void testFromXContent( + int numberOfTestRuns, + Supplier instanceSupplier, + boolean supportsUnknownFields, + String[] shuffleFieldsExceptions, + Predicate randomFieldsExcludeFilter, + CheckedBiFunction createParserFunction, + CheckedFunction fromXContent, + BiConsumer assertEqualsConsumer, + boolean assertToXContentEquivalence, + ToXContent.Params toXContentParams, + Consumer dispose ) throws IOException { xContentTester(createParserFunction, instanceSupplier, toXContentParams, fromXContent).numberOfTestRuns(numberOfTestRuns) .supportsUnknownFields(supportsUnknownFields) @@ -230,6 +258,7 @@ public static void testFromXContent( .randomFieldsExcludeFilter(randomFieldsExcludeFilter) .assertEqualsConsumer(assertEqualsConsumer) .assertToXContentEquivalence(assertToXContentEquivalence) + .dispose(dispose) .test(); } @@ -248,10 +277,17 @@ public final void testFromXContent() throws IOException { this::parseInstance, this::assertEqualInstances, assertToXContentEquivalence(), - getToXContentParams() + getToXContentParams(), + this::dispose ); } + /** + * Callback invoked when a test instance is no longer needed. Override it to release any resources associated with the instance. + * @param instance test instance that is no longer used + */ + protected void dispose(T instance) {} + /** * Creates a random test instance to use in the tests. This method will be * called multiple times during test execution and should return a different diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java index 5392986c25507..3adf92e30e15d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java @@ -14,9 +14,11 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.client.internal.FilterClient; import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.search.builder.PointInTimeBuilder; import java.util.Arrays; import java.util.Random; @@ -59,7 +61,7 @@ public RandomizingClient(Client client, Random random) { @Override public SearchRequestBuilder prepareSearch(String...
indices) { - SearchRequestBuilder searchRequestBuilder = in.prepareSearch(indices) + SearchRequestBuilder searchRequestBuilder = new RandomizedSearchRequestBuilder(this).setIndices(indices) .setSearchType(defaultSearchType) .setPreference(defaultPreference) .setBatchedReduceSize(batchedReduceSize); @@ -84,4 +86,18 @@ public Client in() { return super.in(); } + private class RandomizedSearchRequestBuilder extends SearchRequestBuilder { + RandomizedSearchRequestBuilder(ElasticsearchClient client) { + super(client); + } + + @Override + public SearchRequestBuilder setPointInTime(PointInTimeBuilder pointInTimeBuilder) { + if (defaultPreference != null) { + setPreference(null); + } + return super.setPointInTime(pointInTimeBuilder); + } + } + } diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index fb3696a79a579..21a2c2295c809 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -229,7 +229,7 @@ public void testRejectPointInTimeWithIndices() throws Exception { try { final Request request = new Request("POST", "/_async_search"); setRunAsHeader(request, authorizedUser); - request.addParameter("wait_for_completion_timeout", "true"); + request.addParameter("wait_for_completion_timeout", "1s"); request.addParameter("keep_on_completion", "true"); if (randomBoolean()) { request.addParameter("index", "index-" + authorizedUser); diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java index 1766d8fe47820..7ca37f376045f 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java @@ -20,19 +20,19 @@ public class BlobCacheMetrics { public BlobCacheMetrics(MeterRegistry meterRegistry) { this( meterRegistry.registerLongCounter( - "elasticsearch.blob_cache.miss_that_triggered_read", + "es.blob_cache.miss_that_triggered_read.total", "The number of times there was a cache miss that triggered a read from the blob store", "count" ), meterRegistry.registerLongCounter( - "elasticsearch.blob_cache.count_of_evicted_used_regions", + "es.blob_cache.count_of_evicted_used_regions.total", "The number of times a cache entry was evicted where the frequency was not zero", "entries" ), meterRegistry.registerLongHistogram( - "elasticsearch.blob_cache.cache_miss_load_times", - "The timing data for populating entries in the blob store resulting from a cache miss.", - "count" + "es.blob_cache.cache_miss_load_times.histogram", + "The time in microseconds for populating entries in the blob store resulting from a cache miss, expressed as a histogram.", + "micros" ) ); } diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index be95f5c883de8..5e8933f86ae7d 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ 
b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -55,6 +55,7 @@ import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.LongAdder; import java.util.function.IntConsumer; @@ -815,7 +816,7 @@ public int populateAndRead( ) throws Exception { // We are interested in the total time that the system spends when fetching a result (including time spent queuing), so we start // our measurement here. - final long startTime = threadPool.relativeTimeInMillis(); + final long startTime = threadPool.relativeTimeInNanos(); RangeMissingHandler writerInstrumentationDecorator = ( SharedBytes.IO channel, int channelPos, @@ -823,7 +824,7 @@ public int populateAndRead( int length, IntConsumer progressUpdater) -> { writer.fillCacheRange(channel, channelPos, relativePos, length, progressUpdater); - var elapsedTime = threadPool.relativeTimeInMillis() - startTime; + var elapsedTime = TimeUnit.NANOSECONDS.toMicros(threadPool.relativeTimeInNanos() - startTime); SharedBlobCacheService.this.blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime); SharedBlobCacheService.this.blobCacheMetrics.getCacheMissCounter().increment(); }; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 133819cd601d7..f10e7cf170bde 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -160,6 +160,12 @@ public Iterator> settings() { Property.NodeScope ); + /** Optional setting to prevent startup if required providers are not discovered at runtime */ + public static final Setting> FIPS_REQUIRED_PROVIDERS = Setting.stringListSetting( + "xpack.security.fips_mode.required_providers", + Property.NodeScope + ); + /** * Setting for enabling the enrollment process, ie the enroll APIs are enabled, and the initial cluster node generates and displays * enrollment tokens (for Kibana and sometimes for ES nodes) when starting up for the first time. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java index 945084395448a..efc31aacf5e20 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriConsumer; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskManager; @@ -144,7 +145,17 @@ private void getSearchResponseFromIndex( long nowInMillis, ActionListener listener ) { - store.getResponse(searchId, true, listener.delegateFailure((l, response) -> sendFinalResponse(request, response, nowInMillis, l))); + store.getResponse(searchId, true, listener.delegateFailure((l, response) -> { + try { + sendFinalResponse(request, response, nowInMillis, l); + } finally { + if (response instanceof StoredAsyncResponse storedAsyncResponse + && storedAsyncResponse.getResponse() instanceof RefCounted refCounted) { + refCounted.decRef(); + } + } + + })); } private void sendFinalResponse(GetAsyncResultRequest request, Response response, long nowInMillis, ActionListener listener) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index e6e4ea1001f68..d09b96f897e06 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -20,6 +20,8 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.MlStrings; import java.io.IOException; import java.util.Objects; @@ -82,7 +84,16 @@ public void writeTo(StreamOutput out) throws IOException { @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = new ActionRequestValidationException(); + if (MlStrings.isValidId(this.modelId) == false) { + validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "model_id", this.modelId)); + } + + if (validationException.validationErrors().isEmpty() == false) { + return validationException; + } else { + return null; + } } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java index 7cef2bed04ce3..6209ead0cc6a1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java @@ -229,9 +229,13 @@ public static SnapshotUpgradeState getSnapshotUpgradeState(@Nullable PersistentT public static DatafeedState getDatafeedState(String datafeedId, @Nullable PersistentTasksCustomMetadata tasks) { PersistentTasksCustomMetadata.PersistentTask task = 
getDatafeedTask(datafeedId, tasks); + return getDatafeedState(task); + } + + public static DatafeedState getDatafeedState(PersistentTasksCustomMetadata.PersistentTask task) { if (task == null) { // If we haven't started a datafeed then there will be no persistent task, - // which is the same as if the datafeed was't started + // which is the same as if the datafeed wasn't started return DatafeedState.STOPPED; } DatafeedState taskState = (DatafeedState) task.getState(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ModelAliasMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java similarity index 99% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ModelAliasMetadata.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java index 0847479489ec2..1d6c5e564a442 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ModelAliasMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.inference; +package org.elasticsearch.xpack.core.ml.inference; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java index fd2f3627e3fb1..826b0785aa563 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java @@ -86,6 +86,10 @@ public int getTargetAllocations() { return targetAllocations; } + public int getFailedAllocations() { + return state == RoutingState.FAILED ? 
targetAllocations : 0; + } + public RoutingState getState() { return state; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index d27d325a5c596..8147dabda7b48 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -287,6 +287,10 @@ public int totalTargetAllocations() { return nodeRoutingTable.values().stream().mapToInt(RoutingInfo::getTargetAllocations).sum(); } + public int totalFailedAllocations() { + return nodeRoutingTable.values().stream().mapToInt(RoutingInfo::getFailedAllocations).sum(); + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentMetadata.java similarity index 98% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadata.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentMetadata.java index aabedfc4351b5..36fec9ec7b243 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentMetadata.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.inference.assignment; +package org.elasticsearch.xpack.core.ml.inference.assignment; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; @@ -23,7 +23,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentUtils.java similarity index 76% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentUtils.java index 3640d8dcb2808..fa0ce4a095ba0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentUtils.java @@ -5,15 +5,10 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.ml.inference.assignment; +package org.elasticsearch.xpack.core.ml.inference.assignment; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfoUpdate; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingStateAndReason; -import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import java.util.List; import java.util.Optional; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 4968352439fb0..dc4e5d15d265f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -78,7 +78,11 @@ public final class IndexPrivilege extends Privilege { private static final Automaton READ_CROSS_CLUSTER_AUTOMATON = patterns( "internal:transport/proxy/indices:data/read/*", ClusterSearchShardsAction.NAME, - TransportSearchShardsAction.TYPE.name() + TransportSearchShardsAction.TYPE.name(), + // cross-cluster queries for ESQL + "internal:data/read/esql/open_exchange", + "internal:data/read/esql/exchange", + "indices:data/read/esql/cluster" ); private static final Automaton CREATE_AUTOMATON = patterns( "indices:data/write/index*", diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java new file mode 100644 index 0000000000000..10f35bf33f631 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.core.inference.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.utils.MlStringsTests; +import org.junit.Before; + +import java.util.Locale; + +public class PutInferenceModelActionTests extends ESTestCase { + public static String TASK_TYPE; + public static String MODEL_ID; + public static XContentType X_CONTENT_TYPE; + public static BytesReference BYTES; + + @Before + public void setup() throws Exception { + TASK_TYPE = TaskType.ANY.toString(); + MODEL_ID = randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ROOT); + X_CONTENT_TYPE = randomFrom(XContentType.values()); + BYTES = new BytesArray(randomAlphaOfLengthBetween(1, 10)); + } + + public void testValidate() { + // valid model ID + var request = new PutInferenceModelAction.Request(TASK_TYPE, MODEL_ID + "_-0", BYTES, X_CONTENT_TYPE); + ActionRequestValidationException validationException = request.validate(); + assertNull(validationException); + + // invalid model IDs + + var invalidRequest = new PutInferenceModelAction.Request(TASK_TYPE, "", BYTES, X_CONTENT_TYPE); + validationException = invalidRequest.validate(); + assertNotNull(validationException); + + var invalidRequest2 = new PutInferenceModelAction.Request( + TASK_TYPE, + randomAlphaOfLengthBetween(1, 10) + randomFrom(MlStringsTests.SOME_INVALID_CHARS), + BYTES, + X_CONTENT_TYPE + ); + validationException = invalidRequest2.validate(); + assertNotNull(validationException); + + var invalidRequest3 = new PutInferenceModelAction.Request(TASK_TYPE, null, BYTES, X_CONTENT_TYPE); + validationException = invalidRequest3.validate(); + assertNotNull(validationException); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java index 28ebf8b2445c5..830f7dde7c7d8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java @@ -69,4 +69,17 @@ public void testIsRoutable_GivenStartedWithNonZeroAllocations() { RoutingInfo routingInfo = new RoutingInfo(randomIntBetween(1, 10), 1, RoutingState.STARTED, ""); assertThat(routingInfo.isRoutable(), is(true)); } + + public void testGetFailedAllocations() { + int targetAllocations = randomIntBetween(1, 10); + RoutingInfo routingInfo = new RoutingInfo( + randomIntBetween(0, targetAllocations), + targetAllocations, + randomFrom(RoutingState.STARTING, RoutingState.STARTED, RoutingState.STOPPING), + "" + ); + assertThat(routingInfo.getFailedAllocations(), is(0)); + routingInfo = new RoutingInfo(randomIntBetween(0, targetAllocations), targetAllocations, RoutingState.FAILED, ""); + assertThat(routingInfo.getFailedAllocations(), is(targetAllocations)); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlStringsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlStringsTests.java similarity index 87% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlStringsTests.java rename 
to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlStringsTests.java index fb60ac39bdef1..04681fe6e0cd0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlStringsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlStringsTests.java @@ -4,10 +4,9 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.ml.utils; +package org.elasticsearch.xpack.core.ml.utils; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.utils.MlStrings; import java.util.Arrays; import java.util.Collections; @@ -22,6 +21,37 @@ public class MlStringsTests extends ESTestCase { + public static final String[] SOME_INVALID_CHARS = { + "%", + " ", + "!", + "@", + "#", + "$", + "^", + "&", + "*", + "(", + ")", + "+", + "=", + "{", + "}", + "[", + "]", + "|", + "\\", + ":", + ";", + "\"", + "'", + "<", + ">", + ",", + "?", + "/", + "~" }; + public void testDoubleQuoteIfNotAlphaNumeric() { assertEquals("foo2", MlStrings.doubleQuoteIfNotAlphaNumeric("foo2")); assertEquals("\"fo o\"", MlStrings.doubleQuoteIfNotAlphaNumeric("fo o")); diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml index 557da6e584f24..0eb93c59c5b1d 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -407,6 +407,38 @@ setup: "fixed_interval": "1h" } +--- +"Downsample failure": + - skip: + version: " - 8.12.99" + reason: "#103615 merged to 8.13.0 and later" + features: allowed_warnings + + - do: + allowed_warnings: + - "index template [my-template1] has index patterns [failed-downsample-test] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template1] will take precedence during new index creation" + indices.put_index_template: + name: my-template1 + body: + index_patterns: [failed-downsample-test] + template: + settings: + index: + routing: + allocation: + include: + does-not-exist: "yes" + + - do: + catch: /downsample task \[downsample-failed-downsample-test-0-1h\] failed/ + indices.downsample: + index: test + target_index: failed-downsample-test + body: > + { + "fixed_interval": "1h" + } + --- "Downsample to existing index": - skip: diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java index 874b68a4bec55..30066e21e4960 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java @@ -429,7 +429,7 @@ private void downsample(final String sourceIndex, final String downsampleIndex, assertAcked( internalCluster().client() .execute(DownsampleAction.INSTANCE, new DownsampleAction.Request(sourceIndex, downsampleIndex, TIMEOUT, config)) - .actionGet(TIMEOUT.millis()) + .actionGet(TIMEOUT) ); } diff --git 
a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java index 06e69ab4702c1..ebf31bd32b48f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java @@ -159,12 +159,13 @@ private void delegate( final DownsampleShardTaskParams params, final SearchHit[] lastDownsampledTsidHits ) { + DownsampleShardTask downsampleShardTask = (DownsampleShardTask) task; client.execute( DelegatingAction.INSTANCE, - new DelegatingAction.Request((DownsampleShardTask) task, lastDownsampledTsidHits, params), + new DelegatingAction.Request(downsampleShardTask, lastDownsampledTsidHits, params), ActionListener.wrap(empty -> {}, e -> { LOGGER.error("error while delegating", e); - markAsFailed(task, e); + markAsFailed(downsampleShardTask, e); }) ); } @@ -222,7 +223,8 @@ protected void doRun() throws Exception { }); } - private static void markAsFailed(AllocatedPersistentTask task, Exception e) { + private static void markAsFailed(DownsampleShardTask task, Exception e) { + task.setDownsampleShardIndexerStatus(DownsampleShardIndexerStatus.FAILED); task.updatePersistentTaskState( new DownsampleShardPersistentTaskState(DownsampleShardIndexerStatus.FAILED, null), ActionListener.running(() -> task.markAsFailed(e)) diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 7ebe77529702f..e7bd2f0c0fb27 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -400,6 +400,19 @@ private void performShardDownsampling( @Override public void onResponse(PersistentTasksCustomMetadata.PersistentTask persistentTask) { + if (persistentTask != null) { + var runningPersistentTaskState = (DownsampleShardPersistentTaskState) persistentTask.getState(); + if (runningPersistentTaskState != null) { + if (runningPersistentTaskState.failed()) { + onFailure(new ElasticsearchException("downsample task [" + persistentTaskId + "] failed")); + return; + } else if (runningPersistentTaskState.cancelled()) { + onFailure(new ElasticsearchException("downsample task [" + persistentTaskId + "] cancelled")); + return; + } + } + } + logger.info("Downsampling task [" + persistentTaskId + "] completed for shard " + params.shardId()); if (countDown.decrementAndGet() == 0) { logger.info("All downsampling tasks completed [" + numberOfShards + "]"); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java index 4e4817d4c041d..befb2c7503515 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java @@ -100,6 +100,7 @@ private void searchWithPIT(MultiSearchRequest search, ActionListener *

- * The generation code also looks for the optional methods {@code combineStates} + * The generation code also looks for the optional methods {@code combineIntermediate} * and {@code evaluateFinal} which are used to combine intermediate states and * produce the final output. If the first is missing then the generated code will * call the {@code combine} method to combine intermediate states. If the second diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 6acddf6aa5cde..c00045d342fc2 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -69,7 +69,6 @@ public class AggregatorImplementer { private final ExecutableElement init; private final ExecutableElement combine; private final ExecutableElement combineValueCount; - private final ExecutableElement combineStates; private final ExecutableElement combineIntermediate; private final ExecutableElement evaluateFinal; private final ClassName implementation; @@ -95,7 +94,6 @@ public AggregatorImplementer(Elements elements, TypeElement declarationType, Int return firstParamType.isPrimitive() || firstParamType.toString().equals(stateType.toString()); }); this.combineValueCount = findMethod(declarationType, "combineValueCount"); - this.combineStates = findMethod(declarationType, "combineStates"); this.combineIntermediate = findMethod(declarationType, "combineIntermediate"); this.evaluateFinal = findMethod(declarationType, "evaluateFinal"); @@ -399,34 +397,30 @@ private MethodSpec addIntermediateInput() { builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page"); builder.addStatement("assert channels.size() == intermediateBlockCount()"); builder.addStatement("assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size()"); - builder.addStatement("Block uncastBlock = page.getBlock(channels.get(0))"); - builder.beginControlFlow("if (uncastBlock.areAllValuesNull())"); - { - builder.addStatement("return"); - builder.endControlFlow(); - } - int count = 0; - for (var interState : intermediateState) { + for (int i = 0; i < intermediateState.size(); i++) { + var interState = intermediateState.get(i); + ClassName blockType = blockType(interState.elementType()); + builder.addStatement("Block $L = page.getBlock(channels.get($L))", interState.name + "Uncast", i); + builder.beginControlFlow("if ($L.areAllValuesNull())", interState.name + "Uncast"); + { + builder.addStatement("return"); + builder.endControlFlow(); + } builder.addStatement( - "$T " + interState.name() + " = page.<$T>getBlock(channels.get(" + count + ")).asVector()", + "$T $L = (($T) $L).asVector()", vectorType(interState.elementType()), - blockType(interState.elementType()) - ); - count++; - } - final String first = intermediateState.get(0).name(); - builder.addStatement("assert " + first + ".getPositionCount() == 1"); - if (intermediateState.size() > 1) { - builder.addStatement( - "assert " - + intermediateState.stream() - .map(IntermediateStateDesc::name) - .skip(1) - .map(s -> first + ".getPositionCount() == " + s + ".getPositionCount()") - .collect(joining(" && ")) + interState.name(), + blockType, + interState.name() + "Uncast" ); + builder.addStatement("assert $L.getPositionCount() == 1", 
interState.name()); } - if (hasPrimitiveState()) { + if (combineIntermediate != null) { + if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { + builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); + } + builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); + } else if (hasPrimitiveState()) { assert intermediateState.size() == 2; assert intermediateState.get(1).name().equals("seen"); builder.beginControlFlow("if (seen.getBoolean(0))"); @@ -438,10 +432,7 @@ private MethodSpec addIntermediateInput() { builder.endControlFlow(); } } else { - if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { - builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); - } - builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); + throw new IllegalArgumentException("Don't know how to combine intermediate input. Define combineIntermediate"); } return builder.build(); } @@ -468,7 +459,7 @@ private String primitiveStateMethod() { return "doubleValue"; default: throw new IllegalArgumentException( - "don't know how to fetch primitive values from " + stateType + ". define combineStates." + "don't know how to fetch primitive values from " + stateType + ". define combineIntermediate." ); } } @@ -493,7 +484,7 @@ private MethodSpec evaluateFinal() { .addParameter(DRIVER_CONTEXT, "driverContext"); if (stateTypeHasSeen) { builder.beginControlFlow("if (state.seen() == false)"); - builder.addStatement("blocks[offset] = $T.constantNullBlock(1, driverContext.blockFactory())", BLOCK); + builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1)", BLOCK); builder.addStatement("return"); builder.endControlFlow(); } @@ -508,22 +499,13 @@ private MethodSpec evaluateFinal() { private void primitiveStateToResult(MethodSpec.Builder builder) { switch (stateType.toString()) { case "org.elasticsearch.compute.aggregation.IntState": - builder.addStatement( - "blocks[offset] = $T.newConstantBlockWith(state.intValue(), 1, driverContext.blockFactory())", - INT_BLOCK - ); + builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1)"); return; case "org.elasticsearch.compute.aggregation.LongState": - builder.addStatement( - "blocks[offset] = $T.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory())", - LONG_BLOCK - ); + builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1)"); return; case "org.elasticsearch.compute.aggregation.DoubleState": - builder.addStatement( - "blocks[offset] = $T.newConstantBlockWith(state.doubleValue(), 1, driverContext.blockFactory())", - DOUBLE_BLOCK - ); + builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1)"); return; default: throw new IllegalArgumentException("don't know how to convert state to result: " + stateType); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java index f651ab2a316aa..6cd72bd643c32 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java @@ 
-194,7 +194,7 @@ static String vectorAccessorName(String elementTypeName) { case "DOUBLE" -> "getDouble"; case "BYTES_REF" -> "getBytesRef"; default -> throw new IllegalArgumentException( - "don't know how to fetch primitive values from " + elementTypeName + ". define combineStates." + "don't know how to fetch primitive values from " + elementTypeName + ". define combineIntermediate." ); }; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java index f77f1893caa01..5b82950c7de37 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntVector; @@ -60,14 +59,14 @@ void set(int groupId, double value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (DoubleVector.Builder builder = DoubleVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (DoubleVector.Builder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.appendDouble(values.get(selected.getInt(i))); } return builder.build().asBlock(); } } - try (DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); if (hasValue(group)) { @@ -98,8 +97,8 @@ public void toIntermediate( ) { assert blocks.length >= offset + 2; try ( - var valuesBuilder = DoubleBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); - var hasValueBuilder = BooleanBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()) + var valuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var hasValueBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount()) ) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); @@ -111,7 +110,7 @@ public void toIntermediate( hasValueBuilder.appendBoolean(hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); - blocks[offset + 1] = hasValueBuilder.build(); + blocks[offset + 1] = hasValueBuilder.build().asBlock(); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java index 82578090503ab..0234f36f6675c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; @@ -59,14 +58,14 @@ void set(int groupId, int value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (IntVector.Builder builder = IntVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (IntVector.Builder builder = driverContext.blockFactory().newIntVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.appendInt(values.get(selected.getInt(i))); } return builder.build().asBlock(); } } - try (IntBlock.Builder builder = IntBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (IntBlock.Builder builder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); if (hasValue(group)) { @@ -97,8 +96,8 @@ public void toIntermediate( ) { assert blocks.length >= offset + 2; try ( - var valuesBuilder = IntBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); - var hasValueBuilder = BooleanBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()) + var valuesBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var hasValueBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount()) ) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); @@ -110,7 +109,7 @@ public void toIntermediate( hasValueBuilder.appendBoolean(hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); - blocks[offset + 1] = hasValueBuilder.build(); + blocks[offset + 1] = hasValueBuilder.build().asBlock(); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java index f77d22fb1d26a..860bf43eaad82 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -66,14 +65,14 @@ void increment(int groupId, long value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (LongVector.Builder builder = LongVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongVector.Builder builder = 
driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.appendLong(values.get(selected.getInt(i))); } return builder.build().asBlock(); } } - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); if (hasValue(group)) { @@ -104,8 +103,8 @@ public void toIntermediate( ) { assert blocks.length >= offset + 2; try ( - var valuesBuilder = LongBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); - var hasValueBuilder = BooleanBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()) + var valuesBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var hasValueBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount()) ) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); @@ -117,7 +116,7 @@ public void toIntermediate( hasValueBuilder.appendBoolean(hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); - blocks[offset + 1] = hasValueBuilder.build(); + blocks[offset + 1] = hasValueBuilder.build().asBlock(); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index b73b993379078..f9b8358faee6b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -8,19 +8,19 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of boolean. + * Block implementation that stores values in a {@link BooleanArrayVector}. * This class is generated. Do not edit it. 
*/ final class BooleanArrayBlock extends AbstractArrayBlock implements BooleanBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BooleanArrayBlock.class); - private final boolean[] values; + private final BooleanArrayVector vector; BooleanArrayBlock( boolean[] values, @@ -31,7 +31,7 @@ final class BooleanArrayBlock extends AbstractArrayBlock implements BooleanBlock BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new BooleanArrayVector(values, values.length, blockFactory); } @Override @@ -41,7 +41,7 @@ public BooleanVector asVector() { @Override public boolean getBoolean(int valueIndex) { - return values[valueIndex]; + return vector.getBoolean(valueIndex); } @Override @@ -79,7 +79,7 @@ public BooleanBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,14 +96,13 @@ public BooleanBlock expand() { } } - public static long ramBytesEstimated(boolean[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -126,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java index b07a5222c1462..17ed741bd59da 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java @@ -14,13 +14,14 @@ import java.util.BitSet; /** - * Block implementation that stores values in a BooleanArray. + * Block implementation that stores values in a {@link BooleanBigArrayVector}. Does not take ownership of the given + * {@link BitArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
*/ public final class BooleanBigArrayBlock extends AbstractArrayBlock implements BooleanBlock { private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this - private final BitArray values; + private final BooleanBigArrayVector vector; public BooleanBigArrayBlock( BitArray values, @@ -31,7 +32,7 @@ public BooleanBigArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new BooleanBigArrayVector(values, (int) values.size(), blockFactory); } @Override @@ -41,7 +42,7 @@ public BooleanVector asVector() { @Override public boolean getBoolean(int valueIndex) { - return values.get(valueIndex); + return vector.getBoolean(valueIndex); } @Override @@ -79,7 +80,7 @@ public BooleanBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,10 +97,13 @@ public BooleanBlock expand() { } } + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); } @Override @@ -123,13 +127,19 @@ public String toString() { + ", mvOrdering=" + mvOrdering() + ", ramBytesUsed=" - + values.ramBytesUsed() + + vector.ramBytesUsed() + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed() + RamUsageEstimator.sizeOf(values), true); - Releasables.closeExpectNoException(values); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 1de8844d01634..9618edb1fa77a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed BooleanArray. + * Vector implementation that defers to an enclosed {@link BitArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class BooleanBigArrayVector extends AbstractVector implements BooleanVector, Releasable { @@ -64,11 +65,9 @@ public BooleanVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link BitArray} is adjusted outside + // of this class. 
values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 5512db7df8b11..fffa3af137d76 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -166,44 +166,6 @@ static int hash(BooleanBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newBooleanBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a builder. - * @deprecated use {@link BlockFactory#newBooleanBlockBuilder} - */ - @Deprecated - static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newBooleanBlockBuilder(estimatedSize); - } - - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantBooleanBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static BooleanBlock newConstantBlockWith(boolean value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a constant block. - * @deprecated use {@link BlockFactory#newConstantBooleanBlockWith} - */ - @Deprecated - static BooleanBlock newConstantBlockWith(boolean value, int positions, BlockFactory blockFactory) { - return blockFactory.newConstantBooleanBlockWith(value, positions); - } - /** * Builder for {@link BooleanBlock} */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index ec4ab8f7def1c..7c86f40981ec7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -101,40 +101,10 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newBooleanVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newBooleanVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newBooleanVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. 
- * @deprecated use {@link BlockFactory#newBooleanVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newBooleanVectorFixedBuilder(size); - } - /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits BooleanVectorBuilder { + sealed interface Builder extends Vector.Builder permits BooleanVectorBuilder, FixedBuilder { /** * Appends a boolean to the current entry. */ @@ -147,13 +117,11 @@ sealed interface Builder extends Vector.Builder permits BooleanVectorBuilder { /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits BooleanVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits BooleanVectorFixedBuilder { /** * Appends a boolean to the current entry. */ - FixedBuilder appendBoolean(boolean value); - @Override - BooleanVector build(); + FixedBuilder appendBoolean(boolean value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index b1376b9012498..d707e3cf901c1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a BooleanVector. + * Block view of a {@link BooleanVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class BooleanVectorBlock extends AbstractVectorBlock implements BooleanBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 3ee116bcf767b..e2598d3d86b8f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -15,14 +15,15 @@ import java.util.BitSet; /** - * Block implementation that stores an array of BytesRef. + * Block implementation that stores values in a {@link BytesRefArrayVector}. + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
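The Builder/FixedBuilder change just above reshapes the sealed hierarchy: FixedBuilder now extends Builder rather than Vector.Builder, so code written against Builder also accepts a fixed-size builder, and build() no longer needs to be redeclared. A minimal, self-contained sketch of the idea — every name here is illustrative, not the real ESQL API:

```java
import java.util.ArrayList;
import java.util.List;

// Self-contained sketch of the sealed-hierarchy change; illustrative names.
public class BuilderHierarchyDemo {

    sealed interface Builder permits GrowingBuilder, FixedBuilder {
        Builder append(boolean value);   // fluent, like appendBoolean(...)
        List<Boolean> build();
    }

    // The fixed-size builder is now a subtype of Builder; the covariant
    // return keeps fluent chaining, and build() is simply inherited,
    // matching the redeclared build() removed in this diff.
    sealed interface FixedBuilder extends Builder permits FixedImpl {
        @Override
        FixedBuilder append(boolean value);
    }

    static final class GrowingBuilder implements Builder {
        private final List<Boolean> values = new ArrayList<>();

        public Builder append(boolean value) {
            values.add(value);
            return this;
        }

        public List<Boolean> build() {
            return values;
        }
    }

    static final class FixedImpl implements FixedBuilder {
        private final boolean[] values;
        private int next;

        FixedImpl(int size) {
            values = new boolean[size];
        }

        public FixedBuilder append(boolean value) {
            values[next++] = value;
            return this;
        }

        public List<Boolean> build() {
            List<Boolean> out = new ArrayList<>(values.length);
            for (boolean v : values) {
                out.add(v);
            }
            return out;
        }
    }

    // Code written against Builder now accepts fixed-size builders too.
    static List<Boolean> fill(Builder builder) {
        return builder.append(true).append(false).build();
    }

    public static void main(String[] args) {
        System.out.println(fill(new GrowingBuilder()));  // [true, false]
        System.out.println(fill(new FixedImpl(2)));      // [true, false]
    }
}
```

The `sealed`/`permits` clauses keep the set of implementations closed even though FixedBuilder is now reachable through the Builder type.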
*/ final class BytesRefArrayBlock extends AbstractArrayBlock implements BytesRefBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArrayBlock.class); - private final BytesRefArray values; + private final BytesRefArrayVector vector; BytesRefArrayBlock( BytesRefArray values, @@ -33,7 +34,7 @@ final class BytesRefArrayBlock extends AbstractArrayBlock implements BytesRefBlo BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new BytesRefArrayVector(values, (int) values.size(), blockFactory); } @Override @@ -43,7 +44,7 @@ public BytesRefVector asVector() { @Override public BytesRef getBytesRef(int valueIndex, BytesRef dest) { - return values.get(valueIndex, dest); + return vector.getBytesRef(valueIndex, dest); } @Override @@ -82,7 +83,7 @@ public BytesRefBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector final BytesRef scratch = new BytesRef(); try (var builder = blockFactory().newBytesRefBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { @@ -100,14 +101,13 @@ public BytesRefBlock expand() { } } - public static long ramBytesEstimated(BytesRefArray values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -130,14 +130,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + values.size() + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); - Releasables.closeExpectNoException(values); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 720be83aa040a..53e5ee61787c6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -14,6 +14,7 @@ /** * Vector implementation that stores an array of BytesRef values. + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
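BytesRefArrayBlock above follows the pattern applied to every array block in this change: instead of holding raw storage, the block constructs and wraps a vector, delegating reads, memory accounting, and release to it. A rough, self-contained sketch of that delegation — hypothetical names, with a plain long[] standing in for the real storage:

```java
import java.util.BitSet;

// Rough sketch of the block-wraps-vector pattern; hypothetical names.
public class BlockWrapsVectorDemo {

    static final class DemoVector implements AutoCloseable {
        private final long[] values;

        DemoVector(long[] values) {
            this.values = values;
        }

        long get(int valueIndex) {
            return values[valueIndex];
        }

        long ramBytesUsed() {
            return 16 + 8L * values.length; // crude shallow + storage estimate
        }

        @Override
        public void close() {
            // in the real code: return this vector's bytes to the breaker
        }
    }

    static final class DemoBlock implements AutoCloseable {
        private final DemoVector vector;       // storage lives in the vector now
        private final int[] firstValueIndexes; // block-only bookkeeping
        private final BitSet nulls;

        DemoBlock(long[] values, int[] firstValueIndexes, BitSet nulls) {
            this.vector = new DemoVector(values);
            this.firstValueIndexes = firstValueIndexes;
            this.nulls = nulls;
        }

        long getLong(int valueIndex) {
            return vector.get(valueIndex);     // reads delegate to the vector
        }

        long ramBytesUsedOnlyBlock() {         // the block's own overhead
            return 32 + 4L * firstValueIndexes.length + nulls.size() / 8;
        }

        long ramBytesUsed() {                  // total = block overhead + vector
            return ramBytesUsedOnlyBlock() + vector.ramBytesUsed();
        }

        @Override
        public void close() {
            // in the real code: adjustBreaker(-ramBytesUsedOnlyBlock()), then
            // close the vector so it releases the bytes it tracks itself
            vector.close();
        }
    }

    public static void main(String[] args) {
        DemoBlock block = new DemoBlock(new long[] { 1, 2, 3 }, new int[] { 0, 1, 2, 3 }, new BitSet(3));
        System.out.println(block.getLong(1) + " / " + block.ramBytesUsed() + " bytes");
        block.close();
    }
}
```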
*/ final class BytesRefArrayVector extends AbstractVector implements BytesRefVector { @@ -86,11 +87,9 @@ public String toString() { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link BytesRefArray} is adjusted outside + // of this class. blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 50611f3e15130..8ed17a1435302 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -171,44 +171,6 @@ static int hash(BytesRefBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newBytesRefBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a builder. - * @deprecated use {@link BlockFactory#newBytesRefBlockBuilder} - */ - @Deprecated - static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newBytesRefBlockBuilder(estimatedSize); - } - - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantBytesRefBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static BytesRefBlock newConstantBlockWith(BytesRef value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a constant block. - * @deprecated use {@link BlockFactory#newConstantBytesRefBlockWith} - */ - @Deprecated - static BytesRefBlock newConstantBlockWith(BytesRef value, int positions, BlockFactory blockFactory) { - return blockFactory.newConstantBytesRefBlockWith(value, positions); - } - /** * Builder for {@link BytesRefBlock} */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index b7011666b981d..5c56ece72c298 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -101,25 +101,6 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. 
- * @deprecated use {@link BlockFactory#newBytesRefVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. - * @deprecated use {@link BlockFactory#newBytesRefVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newBytesRefVectorBuilder(estimatedSize); - } - /** * A builder that grows as needed. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 55d04dbfb823a..92f93d5d23a49 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -11,7 +11,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a BytesRefVector. + * Block view of a {@link BytesRefVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class BytesRefVectorBlock extends AbstractVectorBlock implements BytesRefBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index 494ab9faf8570..16d70d1a0e800 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -70,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index c33e0e935c37a..57ec1c945ade5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -75,13 +75,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index 26ff912a88ec3..a783f0243313e 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -70,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index 452f4809fa8aa..56573e985c387 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -70,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 6c75f27f50070..0173f1c1d4d7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -70,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index 626a29fa1d1b7..96e96ac459a50 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -8,19 +8,19 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of double. + * Block implementation that stores values in a {@link DoubleArrayVector}. * This class is generated. Do not edit it. 
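The hand-written close() overrides deleted from the Constant*Vector classes above all repeated the same double-release check. The shape of the replacement, as suggested by the closeInternal() methods appearing elsewhere in this diff, is a template method in the shared base class; a self-contained sketch under that assumption (names illustrative, not the real base class):

```java
// Sketch of the assumed replacement: the base class performs the
// double-release check once and delegates to a closeInternal() hook.
public class TemplateCloseDemo {

    abstract static class AbstractVector implements AutoCloseable {
        private boolean released;

        @Override
        public final void close() {
            if (released) {
                throw new IllegalStateException("can't release already released vector [" + this + "]");
            }
            released = true;
            closeInternal();
        }

        // Subclass-specific cleanup: breaker adjustments, closing storage, ...
        abstract void closeInternal();
    }

    static final class ConstantDemoVector extends AbstractVector {
        @Override
        void closeInternal() {
            // a constant vector has nothing to free beyond its accounting
        }
    }

    public static void main(String[] args) {
        ConstantDemoVector vector = new ConstantDemoVector();
        vector.close();
        try {
            vector.close(); // second close is rejected by the base class
        } catch (IllegalStateException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```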
*/ final class DoubleArrayBlock extends AbstractArrayBlock implements DoubleBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleArrayBlock.class); - private final double[] values; + private final DoubleArrayVector vector; DoubleArrayBlock( double[] values, @@ -31,7 +31,7 @@ final class DoubleArrayBlock extends AbstractArrayBlock implements DoubleBlock { BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new DoubleArrayVector(values, values.length, blockFactory); } @Override @@ -41,7 +41,7 @@ public DoubleVector asVector() { @Override public double getDouble(int valueIndex) { - return values[valueIndex]; + return vector.getDouble(valueIndex); } @Override @@ -79,7 +79,7 @@ public DoubleBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,14 +96,13 @@ public DoubleBlock expand() { } } - public static long ramBytesEstimated(double[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -126,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java index 8d7083f1ac380..5b1dcbfc9d728 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java @@ -14,13 +14,14 @@ import java.util.BitSet; /** - * Block implementation that stores values in a DoubleArray. + * Block implementation that stores values in a {@link DoubleBigArrayVector}. Does not take ownership of the given + * {@link DoubleArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
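DoubleArrayBlock's new closeInternal() above releases only ramBytesUsedOnlyBlock() before closing the vector. The arithmetic matters: the vector accounts for the values it holds, so returning the block's full ramBytesUsed() to the breaker and then also closing the vector would decrement the value bytes twice. A tiny sketch of the invariant with a stand-in breaker counter:

```java
// Stand-in breaker: a single counter that must return to zero exactly once
// per byte reserved. Demonstrates why closeInternal() frees only the
// block-local bytes before closing the vector.
public class BreakerAccountingDemo {

    private static long breakerBytes = 0;

    static void adjustBreaker(long delta) {
        breakerBytes += delta;
        if (breakerBytes < 0) {
            throw new IllegalStateException("over-released by " + -breakerBytes + " bytes");
        }
    }

    public static void main(String[] args) {
        long vectorBytes = 4096;   // reserved when the vector was built
        long blockOnlyBytes = 64;  // firstValueIndexes + nulls-mask overhead

        adjustBreaker(vectorBytes);
        adjustBreaker(blockOnlyBytes);

        // closeInternal(): free only the block's own overhead ...
        adjustBreaker(-blockOnlyBytes);
        // ... then close the vector, which frees the bytes it tracks
        adjustBreaker(-vectorBytes);

        // freeing the block's full ramBytesUsed() AND closing the vector
        // would have subtracted vectorBytes twice and tripped the check
        System.out.println("breaker after close: " + breakerBytes); // 0
    }
}
```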
*/ public final class DoubleBigArrayBlock extends AbstractArrayBlock implements DoubleBlock { private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this - private final DoubleArray values; + private final DoubleBigArrayVector vector; public DoubleBigArrayBlock( DoubleArray values, @@ -31,7 +32,7 @@ public DoubleBigArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new DoubleBigArrayVector(values, (int) values.size(), blockFactory); } @Override @@ -41,7 +42,7 @@ public DoubleVector asVector() { @Override public double getDouble(int valueIndex) { - return values.get(valueIndex); + return vector.getDouble(valueIndex); } @Override @@ -79,7 +80,7 @@ public DoubleBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,10 +97,13 @@ public DoubleBlock expand() { } } + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); } @Override @@ -123,13 +127,19 @@ public String toString() { + ", mvOrdering=" + mvOrdering() + ", ramBytesUsed=" - + values.ramBytesUsed() + + vector.ramBytesUsed() + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed() + RamUsageEstimator.sizeOf(values), true); - Releasables.closeExpectNoException(values); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index e7e8cf12e0355..45b9b4bec14ba 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed DoubleArray. + * Vector implementation that defers to an enclosed {@link DoubleArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class DoubleBigArrayVector extends AbstractVector implements DoubleVector, Releasable { @@ -62,11 +63,9 @@ public DoubleVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link DoubleArray} is adjusted outside + // of this class. 
values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 16c60a1d6486e..890f965c765bb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -167,44 +167,6 @@ static int hash(DoubleBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newDoubleBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a builder. - * @deprecated use {@link BlockFactory#newDoubleBlockBuilder} - */ - @Deprecated - static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newDoubleBlockBuilder(estimatedSize); - } - - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantDoubleBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static DoubleBlock newConstantBlockWith(double value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a constant block. - * @deprecated use {@link BlockFactory#newConstantDoubleBlockWith} - */ - @Deprecated - static DoubleBlock newConstantBlockWith(double value, int positions, BlockFactory blockFactory) { - return blockFactory.newConstantDoubleBlockWith(value, positions); - } - /** * Builder for {@link DoubleBlock} */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index acabd0deb17f6..f54044874acdd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -102,40 +102,10 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newDoubleVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newDoubleVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newDoubleVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. 
- * @deprecated use {@link BlockFactory#newDoubleVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newDoubleVectorFixedBuilder(size); - } - /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits DoubleVectorBuilder { + sealed interface Builder extends Vector.Builder permits DoubleVectorBuilder, FixedBuilder { /** * Appends a double to the current entry. */ @@ -148,13 +118,11 @@ sealed interface Builder extends Vector.Builder permits DoubleVectorBuilder { /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits DoubleVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits DoubleVectorFixedBuilder { /** * Appends a double to the current entry. */ - FixedBuilder appendDouble(double value); - @Override - DoubleVector build(); + FixedBuilder appendDouble(double value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index e6b9fbd5a4524..2aa8e07c25604 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a DoubleVector. + * Block view of a {@link DoubleVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class DoubleVectorBlock extends AbstractVectorBlock implements DoubleBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 5e927a53fcebe..e8f10ced11adc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -8,19 +8,19 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of int. + * Block implementation that stores values in a {@link IntArrayVector}. * This class is generated. Do not edit it. 
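The clarified javadoc above ("Cannot represent multi-values or nulls") pins down what a vector-backed block view is: an adapter exposing a dense, single-valued, null-free vector through the Block API. A minimal sketch of the adapter shape, with illustrative names and int stand-ins for the generated types:

```java
// Minimal sketch of a vector-backed block view; illustrative names.
public class VectorBlockViewDemo {

    static final class DemoVector {
        private final int[] values;

        DemoVector(int... values) {
            this.values = values;
        }

        int getInt(int position) {
            return values[position];
        }

        int getPositionCount() {
            return values.length;
        }
    }

    // Adapter exposing the vector through a block-shaped API: one value per
    // position, never null -- hence "cannot represent multi-values or nulls".
    static final class DemoVectorBlock {
        private final DemoVector vector;

        DemoVectorBlock(DemoVector vector) {
            this.vector = vector;
        }

        boolean isNull(int position) {
            return false;
        }

        int getValueCount(int position) {
            return 1;
        }

        int getFirstValueIndex(int position) {
            return position;  // positions and value indexes coincide
        }

        int getInt(int valueIndex) {
            return vector.getInt(valueIndex);
        }
    }

    public static void main(String[] args) {
        DemoVectorBlock block = new DemoVectorBlock(new DemoVector(7, 8, 9));
        System.out.println(block.getInt(block.getFirstValueIndex(2))); // 9
    }
}
```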
*/ final class IntArrayBlock extends AbstractArrayBlock implements IntBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntArrayBlock.class); - private final int[] values; + private final IntArrayVector vector; IntArrayBlock( int[] values, @@ -31,7 +31,7 @@ final class IntArrayBlock extends AbstractArrayBlock implements IntBlock { BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new IntArrayVector(values, values.length, blockFactory); } @Override @@ -41,7 +41,7 @@ public IntVector asVector() { @Override public int getInt(int valueIndex) { - return values[valueIndex]; + return vector.getInt(valueIndex); } @Override @@ -79,7 +79,7 @@ public IntBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,14 +96,13 @@ public IntBlock expand() { } } - public static long ramBytesEstimated(int[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -126,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java index d3a1fedfb3a1a..ad6033fb452a0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java @@ -14,13 +14,14 @@ import java.util.BitSet; /** - * Block implementation that stores values in a IntArray. + * Block implementation that stores values in a {@link IntBigArrayVector}. Does not take ownership of the given + * {@link IntArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
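The new IntBigArrayBlock javadoc above spells out an ownership contract: the wrapper reads from a big array whose memory is tracked by whoever created it, so the wrapper must not bill those bytes to a circuit breaker a second time. One way to read that contract, as a self-contained sketch — hypothetical names, with the real IntArray and breaker plumbing simplified away:

```java
// Sketch of the "does not take ownership / does not adjust breakers"
// contract; TrackedIntStore is a hypothetical stand-in for a big array
// whose memory is accounted by its creator.
public class BorrowedStorageDemo {

    static final class TrackedIntStore implements AutoCloseable {
        final int[] data;

        TrackedIntStore(int size) {
            data = new int[size]; // creator-side accounting would happen here
        }

        int get(int index) {
            return data[index];
        }

        @Override
        public void close() {
            // creator-side accounting is released here, not by borrowers
        }
    }

    static final class BorrowingVector {
        private final TrackedIntStore values; // borrowed, never owned

        BorrowingVector(TrackedIntStore values) {
            this.values = values;
        }

        int getInt(int index) {
            return values.get(index);
        }

        long ramBytesUsed() {
            return 16; // shallow size only; the store's bytes are billed elsewhere
        }
    }

    public static void main(String[] args) {
        try (TrackedIntStore store = new TrackedIntStore(8)) {
            BorrowingVector vector = new BorrowingVector(store);
            System.out.println(vector.getInt(0) + " / " + vector.ramBytesUsed() + " bytes");
        }
    }
}
```

In the real classes the vector still forwards close() to the enclosed array, but the breaker adjustment for those bytes stays with whoever reserved them — that split is what the javadoc is flagging.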
*/ public final class IntBigArrayBlock extends AbstractArrayBlock implements IntBlock { private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this - private final IntArray values; + private final IntBigArrayVector vector; public IntBigArrayBlock( IntArray values, @@ -31,7 +32,7 @@ public IntBigArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new IntBigArrayVector(values, (int) values.size(), blockFactory); } @Override @@ -41,7 +42,7 @@ public IntVector asVector() { @Override public int getInt(int valueIndex) { - return values.get(valueIndex); + return vector.getInt(valueIndex); } @Override @@ -79,7 +80,7 @@ public IntBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,10 +97,13 @@ public IntBlock expand() { } } + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); } @Override @@ -123,13 +127,19 @@ public String toString() { + ", mvOrdering=" + mvOrdering() + ", ramBytesUsed=" - + values.ramBytesUsed() + + vector.ramBytesUsed() + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed() + RamUsageEstimator.sizeOf(values), true); - Releasables.closeExpectNoException(values); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index d82c3bfe8b8a8..b553c8aab8761 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed IntArray. + * Vector implementation that defers to an enclosed {@link IntArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class IntBigArrayVector extends AbstractVector implements IntVector, Releasable { @@ -62,11 +63,9 @@ public IntVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link IntArray} is adjusted outside + // of this class. 
values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 48bce0bb848e9..9a66445eb55a2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -166,44 +166,6 @@ static int hash(IntBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newIntBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a builder. - * @deprecated use {@link BlockFactory#newIntBlockBuilder} - */ - @Deprecated - static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newIntBlockBuilder(estimatedSize); - } - - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantIntBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static IntBlock newConstantBlockWith(int value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a constant block. - * @deprecated use {@link BlockFactory#newConstantIntBlockWith} - */ - @Deprecated - static IntBlock newConstantBlockWith(int value, int positions, BlockFactory blockFactory) { - return blockFactory.newConstantIntBlockWith(value, positions); - } - /** * Builder for {@link IntBlock} */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 645288565c431..bc7e3c87ec33d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -101,36 +101,6 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newIntVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newIntVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newIntVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. 
- * @deprecated use {@link BlockFactory#newIntVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newIntVectorFixedBuilder(size); - } - /** Create a vector for a range of ints. */ static IntVector range(int startInclusive, int endExclusive, BlockFactory blockFactory) { int[] values = new int[endExclusive - startInclusive]; @@ -143,7 +113,7 @@ static IntVector range(int startInclusive, int endExclusive, BlockFactory blockF /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits IntVectorBuilder { + sealed interface Builder extends Vector.Builder permits IntVectorBuilder, FixedBuilder { /** * Appends a int to the current entry. */ @@ -156,13 +126,11 @@ sealed interface Builder extends Vector.Builder permits IntVectorBuilder { /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits IntVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits IntVectorFixedBuilder { /** * Appends a int to the current entry. */ - FixedBuilder appendInt(int value); - @Override - IntVector build(); + FixedBuilder appendInt(int value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index f60839ed330f2..97a4a48533e3a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a IntVector. + * Block view of a {@link IntVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class IntVectorBlock extends AbstractVectorBlock implements IntBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index 67a77a2cd171f..792f9b267e748 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -8,19 +8,19 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of long. + * Block implementation that stores values in a {@link LongArrayVector}. * This class is generated. Do not edit it. 
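With the deprecated static newVectorBuilder/newVectorFixedBuilder and newBlockBuilder entry points removed above, every caller goes through an explicit BlockFactory, typically inside try-with-resources so builder and block are released deterministically. A sketch of the migrated call pattern against the APIs visible in this diff; exact package locations are assumed, and the NoopCircuitBreaker/non-recycling setup is only there to make the example runnable, not production wiring:

```java
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.LongBlock;

// Sketch of the caller-side migration this change makes everywhere.
public class BuilderMigrationDemo {
    public static void main(String[] args) {
        BlockFactory blockFactory = BlockFactory.getInstance(
            new NoopCircuitBreaker("noop"),
            BigArrays.NON_RECYCLING_INSTANCE
        );
        // before: LongBlock.newBlockBuilder(3, blockFactory) -- deprecated, removed here
        try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(3)) {
            builder.appendLong(1);
            builder.appendNull();
            builder.appendLong(3);
            try (LongBlock block = builder.build()) {
                System.out.println(block.getPositionCount() + " positions");
            }
        }
    }
}
```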
*/ final class LongArrayBlock extends AbstractArrayBlock implements LongBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongArrayBlock.class); - private final long[] values; + private final LongArrayVector vector; LongArrayBlock( long[] values, @@ -31,7 +31,7 @@ final class LongArrayBlock extends AbstractArrayBlock implements LongBlock { BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new LongArrayVector(values, values.length, blockFactory); } @Override @@ -41,7 +41,7 @@ public LongVector asVector() { @Override public long getLong(int valueIndex) { - return values[valueIndex]; + return vector.getLong(valueIndex); } @Override @@ -79,7 +79,7 @@ public LongBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,14 +96,13 @@ public LongBlock expand() { } } - public static long ramBytesEstimated(long[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -126,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java index 0992342f8583b..dc19a4038a9e9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java @@ -14,13 +14,14 @@ import java.util.BitSet; /** - * Block implementation that stores values in a LongArray. + * Block implementation that stores values in a {@link LongBigArrayVector}. Does not take ownership of the given + * {@link LongArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
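LongArrayBlock.expand() above (still marked "TODO use reference counting to share the vector") turns each value of a multi-value position into its own position, preserving nulls, by walking firstValueIndexes. The same loop in plain Java, with a list standing in for the built block:

```java
import java.util.ArrayList;
import java.util.List;

// The expand() loop in plain Java: values laid out flat, positions delimited
// by firstValueIndexes, nulls carried in a parallel flag array. A List stands
// in for the block; the real code streams into a BlockFactory builder.
public class ExpandDemo {

    static List<Long> expand(long[] values, int[] firstValueIndexes, boolean[] isNull) {
        int positionCount = firstValueIndexes.length - 1;
        List<Long> expanded = new ArrayList<>();
        for (int pos = 0; pos < positionCount; pos++) {
            if (isNull[pos]) {
                expanded.add(null);          // a null position expands to one null
                continue;
            }
            int first = firstValueIndexes[pos];
            int end = firstValueIndexes[pos + 1];
            for (int i = first; i < end; i++) {
                expanded.add(values[i]);     // each value becomes its own position
            }
        }
        return expanded;
    }

    public static void main(String[] args) {
        // position 0 -> [1, 2], position 1 -> null, position 2 -> [3]
        long[] values = { 1, 2, 3 };
        int[] firstValueIndexes = { 0, 2, 2, 3 };
        boolean[] isNull = { false, true, false };
        System.out.println(expand(values, firstValueIndexes, isNull)); // [1, 2, null, 3]
    }
}
```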
*/ public final class LongBigArrayBlock extends AbstractArrayBlock implements LongBlock { private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this - private final LongArray values; + private final LongBigArrayVector vector; public LongBigArrayBlock( LongArray values, @@ -31,7 +32,7 @@ public LongBigArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new LongBigArrayVector(values, (int) values.size(), blockFactory); } @Override @@ -41,7 +42,7 @@ public LongVector asVector() { @Override public long getLong(int valueIndex) { - return values.get(valueIndex); + return vector.getLong(valueIndex); } @Override @@ -79,7 +80,7 @@ public LongBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,10 +97,13 @@ public LongBlock expand() { } } + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); } @Override @@ -123,13 +127,19 @@ public String toString() { + ", mvOrdering=" + mvOrdering() + ", ramBytesUsed=" - + values.ramBytesUsed() + + vector.ramBytesUsed() + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed() + RamUsageEstimator.sizeOf(values), true); - Releasables.closeExpectNoException(values); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index a79b1231366f2..d5ea5c9e2a453 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed LongArray. + * Vector implementation that defers to an enclosed {@link LongArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class LongBigArrayVector extends AbstractVector implements LongVector, Releasable { @@ -62,11 +63,9 @@ public LongVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link LongArray} is adjusted outside + // of this class. 
values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index e35971757ae26..5e5dc0606b896 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -167,44 +167,6 @@ static int hash(LongBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newLongBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a builder. - * @deprecated use {@link BlockFactory#newLongBlockBuilder} - */ - @Deprecated - static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newLongBlockBuilder(estimatedSize); - } - - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantLongBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static LongBlock newConstantBlockWith(long value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a constant block. - * @deprecated use {@link BlockFactory#newConstantLongBlockWith} - */ - @Deprecated - static LongBlock newConstantBlockWith(long value, int positions, BlockFactory blockFactory) { - return blockFactory.newConstantLongBlockWith(value, positions); - } - /** * Builder for {@link LongBlock} */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index a312d7aeab0cc..358f5b32366cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -102,40 +102,10 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newLongVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newLongVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newLongVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. 
- * @deprecated use {@link BlockFactory#newLongVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newLongVectorFixedBuilder(size); - } - /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits LongVectorBuilder { + sealed interface Builder extends Vector.Builder permits LongVectorBuilder, FixedBuilder { /** * Appends a long to the current entry. */ @@ -148,13 +118,11 @@ sealed interface Builder extends Vector.Builder permits LongVectorBuilder { /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits LongVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits LongVectorFixedBuilder { /** * Appends a long to the current entry. */ - FixedBuilder appendLong(long value); - @Override - LongVector build(); + FixedBuilder appendLong(long value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index 8654c5d5c3bea..1f4565fec5a8d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a LongVector. + * Block view of a {@link LongVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class LongVectorBlock extends AbstractVectorBlock implements LongBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java index 1fd4c1ea3562d..89388cd9cc109 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java @@ -49,7 +49,7 @@ public BytesRefBlock dedupeToBlockAdaptive(BlockFactory blockFactory) { block.incRef(); return block; } - try (BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -99,7 +99,7 @@ public BytesRefBlock dedupeToBlockUsingCopyAndSort(BlockFactory blockFactory) { block.incRef(); return block; } - try (BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -129,7 +129,7 @@ public BytesRefBlock dedupeToBlockUsingCopyMissing(BlockFactory blockFactory) { block.incRef(); return block; } - try (BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (BytesRefBlock.Builder builder = 
blockFactory.newBytesRefBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java index 157b6670e95af..6066dbe8a74e0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java @@ -46,7 +46,7 @@ public DoubleBlock dedupeToBlockAdaptive(BlockFactory blockFactory) { block.incRef(); return block; } - try (DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -96,7 +96,7 @@ public DoubleBlock dedupeToBlockUsingCopyAndSort(BlockFactory blockFactory) { block.incRef(); return block; } - try (DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -126,7 +126,7 @@ public DoubleBlock dedupeToBlockUsingCopyMissing(BlockFactory blockFactory) { block.incRef(); return block; } - try (DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java index eade5e1fdc347..3961208d5e46f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java @@ -46,7 +46,7 @@ public IntBlock dedupeToBlockAdaptive(BlockFactory blockFactory) { block.incRef(); return block; } - try (IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -96,7 +96,7 @@ public IntBlock dedupeToBlockUsingCopyAndSort(BlockFactory blockFactory) { block.incRef(); return block; } - try (IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -126,7 +126,7 @@ public IntBlock 
dedupeToBlockUsingCopyMissing(BlockFactory blockFactory) { block.incRef(); return block; } - try (IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java index acbc9139a75c5..a3012ffa551b2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java @@ -47,7 +47,7 @@ public LongBlock dedupeToBlockAdaptive(BlockFactory blockFactory) { block.incRef(); return block; } - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -97,7 +97,7 @@ public LongBlock dedupeToBlockUsingCopyAndSort(BlockFactory blockFactory) { block.incRef(); return block; } - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -127,7 +127,7 @@ public LongBlock dedupeToBlockUsingCopyMissing(BlockFactory blockFactory) { block.incRef(); return block; } - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java index 3d568adc2b5ea..184ef69f00d85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java @@ -24,7 +24,7 @@ class ResultBuilderForBoolean implements ResultBuilder { ResultBuilderForBoolean(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = BooleanBlock.newBlockBuilder(initialSize, blockFactory); + this.builder = blockFactory.newBooleanBlockBuilder(initialSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java index 
e37f82f3363a9..4008f7fbd924b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java @@ -28,7 +28,7 @@ class ResultBuilderForBytesRef implements ResultBuilder { ResultBuilderForBytesRef(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { this.encoder = encoder; this.inKey = inKey; - this.builder = BytesRefBlock.newBlockBuilder(initialSize, blockFactory); + this.builder = blockFactory.newBytesRefBlockBuilder(initialSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java index 77c976c6e0085..f06a1e814ef43 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java @@ -24,7 +24,7 @@ class ResultBuilderForDouble implements ResultBuilder { ResultBuilderForDouble(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = DoubleBlock.newBlockBuilder(initialSize, blockFactory); + this.builder = blockFactory.newDoubleBlockBuilder(initialSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java index 389ed3bc2e3c3..848bbf9ab6a0a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java @@ -24,7 +24,7 @@ class ResultBuilderForInt implements ResultBuilder { ResultBuilderForInt(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = IntBlock.newBlockBuilder(initialSize, blockFactory); + this.builder = blockFactory.newIntBlockBuilder(initialSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java index 63ee9d35c59e5..b4361ad83180a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java @@ -24,7 +24,7 @@ class ResultBuilderForLong implements ResultBuilder { ResultBuilderForLong(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = LongBlock.newBlockBuilder(initialSize, blockFactory); + this.builder = blockFactory.newLongBlockBuilder(initialSize); } @Override diff --git 
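
Note on the builder hunks above: the MultivalueDedupe* and top-N ResultBuilderFor* changes are all the same mechanical substitution, moving from the static entry points (BytesRefBlock.newBlockBuilder(size, blockFactory) and friends) to instance methods on BlockFactory, so every builder allocation goes through a factory that owns a circuit breaker. A minimal sketch of the new call shape follows; the wrapper class and method names are illustrative only, and the no-op-breaker factory mirrors the one the benchmarks construct:

import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.LongBlock;

class BuilderMigrationSketch {
    static LongBlock threeLongs() {
        // The factory owns the breaker; a no-op breaker keeps this sketch unlimited.
        BlockFactory blockFactory = BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE);
        // Before: LongBlock.newBlockBuilder(3, blockFactory). After: the factory is the entry point.
        try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(3)) {
            builder.appendLong(1);
            builder.appendLong(2);
            builder.appendLong(3);
            return builder.build();
        }
    }
}

The try-with-resources mirrors the generated code above: builders are releasable, so an exception before build() cannot leak breaker-accounted memory.
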
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java index dd5450d3b460c..e9b4498d50265 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java @@ -86,14 +86,18 @@ private void addRawBlock(BooleanBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block fbitUncast = page.getBlock(channels.get(0)); + if (fbitUncast.areAllValuesNull()) { return; } - BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); - BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); + BooleanVector fbit = ((BooleanBlock) fbitUncast).asVector(); assert fbit.getPositionCount() == 1; - assert fbit.getPositionCount() == tbit.getPositionCount(); + Block tbitUncast = page.getBlock(channels.get(1)); + if (tbitUncast.areAllValuesNull()) { + return; + } + BooleanVector tbit = ((BooleanBlock) tbitUncast).asVector(); + assert tbit.getPositionCount() == 1; CountDistinctBooleanAggregator.combineIntermediate(state, fbit.getBoolean(0), tbit.getBoolean(0)); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java index fd770678d5943..3591dbeb41ffa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java @@ -95,11 +95,11 @@ private void addRawBlock(BytesRefBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctBytesRefAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java index a8169b5a901e1..38d4c7250debe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java @@ -95,11 +95,11 @@ private void 
addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctDoubleAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java index 9f685f4672939..d4bc68500745e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java @@ -95,11 +95,11 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctIntAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java index 55b396aa627d5..06c6f67b356e0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java @@ -95,11 +95,11 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctLongAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java index 6929900c29ea1..f78a8773ccfcd 
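
The CountDistinct hunks above all reshape addIntermediateInput the same way: the block is fetched once under a descriptive name (hllUncast rather than uncastBlock), the all-null early return stays, and the vector is obtained through an explicit cast to the named block type instead of an unchecked page.getBlock(...).asVector(). A condensed sketch of the single-channel HLL read; the wrapper method and the null-return convention are illustrative, not part of the change:

import java.util.List;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BytesRefBlock;
import org.elasticsearch.compute.data.BytesRefVector;
import org.elasticsearch.compute.data.Page;

class HllIntermediateSketch {
    /** Reads the single serialized HLL position, or null when the sender had nothing to contribute. */
    static BytesRef readHll(Page page, List<Integer> channels) {
        Block hllUncast = page.getBlock(channels.get(0));
        if (hllUncast.areAllValuesNull()) {
            return null; // all-null: the sender shipped an empty state for this channel
        }
        // Explicit cast to the named block type, rather than relying on an inferred generic cast.
        BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector();
        assert hll.getPositionCount() == 1; // intermediate state is always a single position
        return hll.getBytesRef(0, new BytesRef());
    }
}
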
100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { return; } - DoubleVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + DoubleVector max = ((DoubleBlock) maxUncast).asVector(); assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), max.getDouble(0))); state.seen(true); @@ -112,10 +116,10 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = DoubleBlock.newConstantBlockWith(state.doubleValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java index 1759442fbb12a..6f83ee7224879 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { return; } - IntVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + IntVector max = ((IntBlock) maxUncast).asVector(); assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.intValue(MaxIntAggregator.combine(state.intValue(), max.getInt(0))); state.seen(true); @@ -112,10 +116,10 @@ public void evaluateIntermediate(Block[] 
blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = IntBlock.newConstantBlockWith(state.intValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java index fe7d797faf10a..8826128a68837 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { return; } - LongVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector max = ((LongBlock) maxUncast).asVector(); assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(MaxLongAggregator.combine(state.longValue(), max.getLong(0))); state.seen(true); @@ -112,10 +116,10 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java index a2e8d8fbf592c..4bcf08ce0fa35 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java @@ -88,11 +88,11 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + 
intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java index 21e99587a5d09..db9dbdab52244 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java @@ -88,11 +88,11 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); MedianAbsoluteDeviationIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java index 8c3aa95864aff..bf5fd51d7ed17 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java @@ -88,11 +88,11 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); MedianAbsoluteDeviationLongAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java index 1f9a8fb49fb2d..7d7544e5d8470 100644 --- 
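
The Max hunks above, and the Min and Sum hunks that follow, also converge on two constant-block factory methods in evaluateFinal: driverContext.blockFactory().newConstantNullBlock(1) when the state never saw a value, and newConstant<Type>BlockWith(value, 1) otherwise, replacing the static Block.constantNullBlock and <Type>Block.newConstantBlockWith forms. A minimal evaluateFinal in that style; seen and value here are stand-ins for the aggregator's state object, not its real API:

import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockFactory;

class EvaluateFinalSketch {
    static Block evaluateFinal(BlockFactory blockFactory, boolean seen, double value) {
        if (seen == false) {
            // No input ever reached this aggregator: emit a single-position constant null.
            return blockFactory.newConstantNullBlock(1);
        }
        // Was: DoubleBlock.newConstantBlockWith(value, 1, blockFactory)
        return blockFactory.newConstantDoubleBlockWith(value, 1);
    }
}
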
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { return; } - DoubleVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + DoubleVector min = ((DoubleBlock) minUncast).asVector(); assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), min.getDouble(0))); state.seen(true); @@ -112,10 +116,10 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = DoubleBlock.newConstantBlockWith(state.doubleValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java index bbeba4c8374ab..0f2385cc120f9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { return; } - IntVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + IntVector min = ((IntBlock) minUncast).asVector(); assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.intValue(MinIntAggregator.combine(state.intValue(), min.getInt(0))); state.seen(true); @@ -112,10 +116,10 @@ public void evaluateIntermediate(Block[] blocks, int 
offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = IntBlock.newConstantBlockWith(state.intValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java index 5299b505e124c..805729588158e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { return; } - LongVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector min = ((LongBlock) minUncast).asVector(); assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(MinLongAggregator.combine(state.longValue(), min.getLong(0))); state.seen(true); @@ -112,10 +116,10 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java index f7560379e476d..cd7a5b5974442 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java @@ -91,11 +91,11 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = 
page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); PercentileDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java index d45ba7a1e350a..b9b1c2e90b768 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java @@ -91,11 +91,11 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); PercentileIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java index dac045d814926..cc785ce55bb55 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java @@ -91,11 +91,11 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); PercentileLongAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java index 5520c587555b3..354726f82b8f3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java +++ 
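
One behavioral nuance in the min/max hunks above: the old code asserted that the value and seen channels had matching position counts, while the new code checks areAllValuesNull() and returns early per channel, then asserts each vector is a single position on its own. A page whose seen channel is all-null is now skipped outright instead of tripping an assertion. A condensed two-channel sketch of that guard; the fixed channel indices and the Math.max stand-in for MaxDoubleAggregator.combine are illustrative (the generated code reads its channels from the constructor):

import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.BooleanVector;
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.DoubleVector;
import org.elasticsearch.compute.data.Page;

class MinMaxIntermediateSketch {
    // Channel 0 carries the running max, channel 1 the `seen` flag.
    static double combine(Page page, double current) {
        Block maxUncast = page.getBlock(0);
        if (maxUncast.areAllValuesNull()) {
            return current; // nothing from this sender
        }
        DoubleVector max = ((DoubleBlock) maxUncast).asVector();
        assert max.getPositionCount() == 1;
        Block seenUncast = page.getBlock(1);
        if (seenUncast.areAllValuesNull()) {
            return current; // seen flag absent: skip the page instead of asserting
        }
        BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
        assert seen.getPositionCount() == 1;
        return seen.getBoolean(0) ? Math.max(current, max.getDouble(0)) : current;
    }
}
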
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java @@ -91,15 +91,24 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block valueUncast = page.getBlock(channels.get(0)); + if (valueUncast.areAllValuesNull()) { return; } - DoubleVector value = page.getBlock(channels.get(0)).asVector(); - DoubleVector delta = page.getBlock(channels.get(1)).asVector(); - BooleanVector seen = page.getBlock(channels.get(2)).asVector(); + DoubleVector value = ((DoubleBlock) valueUncast).asVector(); assert value.getPositionCount() == 1; - assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); + Block deltaUncast = page.getBlock(channels.get(1)); + if (deltaUncast.areAllValuesNull()) { + return; + } + DoubleVector delta = ((DoubleBlock) deltaUncast).asVector(); + assert delta.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; SumDoubleAggregator.combineIntermediate(state, value.getDouble(0), delta.getDouble(0), seen.getBoolean(0)); } @@ -111,7 +120,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } blocks[offset] = SumDoubleAggregator.evaluateFinal(state, driverContext); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java index 1225b90bf09f7..e210429991aa6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java @@ -92,14 +92,18 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { return; } - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector sum = ((LongBlock) sumUncast).asVector(); assert sum.getPositionCount() == 1; - assert sum.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(SumIntAggregator.combine(state.longValue(), sum.getLong(0))); state.seen(true); @@ -114,10 
+118,10 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java index 720e7ca9f3bbf..38d1b3de78265 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { return; } - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector sum = ((LongBlock) sumUncast).asVector(); assert sum.getPositionCount() == 1; - assert sum.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(SumLongAggregator.combine(state.longValue(), sum.getLong(0))); state.seen(true); @@ -112,10 +116,10 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java index d083a48fffb7a..218af8fcb705e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java @@ -13,7 +13,6 @@ import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; import 
org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.DriverContext; @@ -33,10 +32,6 @@ public static void combine(SingleState current, boolean v) { current.bits |= v ? BIT_TRUE : BIT_FALSE; } - public static void combineStates(SingleState current, SingleState state) { - current.bits |= state.bits; - } - public static void combineIntermediate(SingleState current, boolean fbit, boolean tbit) { if (fbit) current.bits |= BIT_FALSE; if (tbit) current.bits |= BIT_TRUE; @@ -44,7 +39,7 @@ public static void combineIntermediate(SingleState current, boolean fbit, boolea public static Block evaluateFinal(SingleState state, DriverContext driverContext) { long result = ((state.bits & BIT_TRUE) >> 1) + (state.bits & BIT_FALSE); - return LongBlock.newConstantBlockWith(result, 1, driverContext.blockFactory()); + return driverContext.blockFactory().newConstantLongBlockWith(result, 1); } public static GroupingState initGrouping(BigArrays bigArrays) { @@ -65,7 +60,7 @@ public static void combineIntermediate(GroupingState current, int groupId, boole } public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { - LongBlock.Builder builder = LongBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); + LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); long count = (state.bits.get(2 * group) ? 1 : 0) + (state.bits.get(2 * group + 1) ? 1 : 0); @@ -135,8 +130,8 @@ void combineStates(int currentGroupId, GroupingState state) { public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { assert blocks.length >= offset + 2; try ( - var fbitBuilder = BooleanBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); - var tbitBuilder = BooleanBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()) + var fbitBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()); + var tbitBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) ) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java index 89ad27f1fef28..13a9e00bb28ab 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java @@ -29,17 +29,13 @@ public static void combine(HllStates.SingleState current, BytesRef v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } public static Block evaluateFinal(HllStates.SingleState state, DriverContext driverContext) { long result = state.cardinality(); - return LongBlock.newConstantBlockWith(result, 1, driverContext.blockFactory()); + return 
driverContext.blockFactory().newConstantLongBlockWith(result, 1); } public static HllStates.GroupingState initGrouping(BigArrays bigArrays, int precision) { @@ -64,7 +60,7 @@ public static void combineStates( } public static Block evaluateFinal(HllStates.GroupingState state, IntVector selected, DriverContext driverContext) { - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); long count = state.cardinality(group); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java index 86b3f9997246e..46a0d24cec8c4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java @@ -29,17 +29,13 @@ public static void combine(HllStates.SingleState current, double v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } public static Block evaluateFinal(HllStates.SingleState state, DriverContext driverContext) { long result = state.cardinality(); - return LongBlock.newConstantBlockWith(result, 1, driverContext.blockFactory()); + return driverContext.blockFactory().newConstantLongBlockWith(result, 1); } public static HllStates.GroupingState initGrouping(BigArrays bigArrays, int precision) { @@ -64,7 +60,7 @@ public static void combineStates( } public static Block evaluateFinal(HllStates.GroupingState state, IntVector selected, DriverContext driverContext) { - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); long count = state.cardinality(group); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java index 993284b0c57c3..9c29eb98f2987 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java @@ -29,17 +29,13 @@ public static void combine(HllStates.SingleState current, int v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } public static Block evaluateFinal(HllStates.SingleState state, DriverContext driverContext) { long result = state.cardinality(); - return 
LongBlock.newConstantBlockWith(result, 1, driverContext.blockFactory()); + return driverContext.blockFactory().newConstantLongBlockWith(result, 1); } public static HllStates.GroupingState initGrouping(BigArrays bigArrays, int precision) { @@ -64,7 +60,7 @@ public static void combineStates( } public static Block evaluateFinal(HllStates.GroupingState state, IntVector selected, DriverContext driverContext) { - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); long count = state.cardinality(group); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java index a09c8df3b0fc3..59570e2f5a7ef 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java @@ -29,17 +29,13 @@ public static void combine(HllStates.SingleState current, long v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } public static Block evaluateFinal(HllStates.SingleState state, DriverContext driverContext) { long result = state.cardinality(); - return LongBlock.newConstantBlockWith(result, 1, driverContext.blockFactory()); + return driverContext.blockFactory().newConstantLongBlockWith(result, 1); } public static HllStates.GroupingState initGrouping(BigArrays bigArrays, int precision) { @@ -64,7 +60,7 @@ public static void combineStates( } public static Block evaluateFinal(HllStates.GroupingState state, IntVector selected, DriverContext driverContext) { - try (LongBlock.Builder builder = LongBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); long count = state.cardinality(group); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java index 995dc5e15740f..5dba070172ae9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java @@ -181,7 +181,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) @Override public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { - try (LongVector.Builder builder = LongVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongVector.Builder builder = 
driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int si = selected.getInt(i); builder.appendLong(state.hasValue(si) ? state.get(si) : 0); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java index 66844f002111e..a8102efa61746 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; @@ -179,7 +178,7 @@ void merge(int groupId, BytesRef other, int otherGroup) { @Override public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { assert blocks.length >= offset + 1; - try (var builder = BytesRefBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (var builder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); builder.appendBytesRef(serializeHLL(group, hll)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java index 2d73c323e9556..db0d57b887008 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, double v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java index b4696f0ab1934..a57e28aebd437 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, int v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git 
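
Two threads tie the remaining hunks together. First, the non-grouping combineStates(SingleState, SingleState) overloads are deleted across the hand-written aggregators (CountDistinct* above, and the MedianAbsoluteDeviation*, Percentile*, and QuantileStates counterparts just below), which suggests single states now merge only through the serialized intermediate form via combineIntermediate. Second, CountGroupingAggregatorFunction above can assign a fixed builder to a plain LongVector.Builder because FixedBuilder now extends Builder (the LongVector interface change at the top of this section); the X-ArrayState template below leans on the same hierarchy. A minimal sketch of that second point, with an illustrative wrapper class and the same no-op-breaker factory as the benchmarks:

import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.LongVector;

class FixedBuilderSketch {
    static LongVector counts(long[] values) {
        BlockFactory blockFactory = BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE);
        // The fixed builder sizes its storage up front and never grows; it is now
        // assignable to LongVector.Builder because FixedBuilder extends Builder.
        try (LongVector.Builder builder = blockFactory.newLongVectorFixedBuilder(values.length)) {
            for (long v : values) {
                builder.appendLong(v);
            }
            return builder.build();
        }
    }
}

The fixed builder fits here because the caller knows the exact output size: CountGroupingAggregatorFunction sizes it with selected.getPositionCount() and appends exactly that many values.
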
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java index bbd9f1821b681..54340f809e4cd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java @@ -32,10 +32,6 @@ public static void combineIntermediate(QuantileStates.SingleState state, BytesRe state.add(inValue); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static Block evaluateFinal(QuantileStates.SingleState state, DriverContext driverContext) { return state.evaluateMedianAbsoluteDeviation(driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java index 3020a920ebddb..1cff8d89b7541 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, double v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java index 4ccd409cc8ccf..d93dc7099fffe 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, int v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java index 2a0eb3a060930..9d900069d15ae 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, long v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { 
state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java index 0b5b89425ed46..0ba7afb0d5e68 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java @@ -72,10 +72,6 @@ void add(double v) { digest.add(v); } - void add(SingleState other) { - digest.add(other.digest); - } - void add(BytesRef other) { digest.add(deserializeDigest(other)); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java index 4c2c38da28b75..5e46225a873f8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.ann.IntermediateState; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; @@ -42,10 +41,6 @@ public static void combine(SumState current, double value, double delta) { current.add(value, delta); } - public static void combineStates(SumState current, SumState state) { - current.add(state.value(), state.delta()); - } - public static void combineIntermediate(SumState state, double inValue, double inDelta, boolean seen) { if (seen) { combine(state, inValue, inDelta); @@ -63,7 +58,7 @@ public static void evaluateIntermediate(SumState state, DriverContext driverCont public static Block evaluateFinal(SumState state, DriverContext driverContext) { double result = state.value(); - return DoubleBlock.newConstantBlockWith(result, 1, driverContext.blockFactory()); + return driverContext.blockFactory().newConstantDoubleBlockWith(result, 1); } public static GroupingSumState initGrouping(BigArrays bigArrays) { @@ -95,9 +90,9 @@ public static void evaluateIntermediate( ) { assert blocks.length >= offset + 3; try ( - var valuesBuilder = DoubleBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); - var deltaBuilder = DoubleBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); - var seenBuilder = BooleanBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()) + var valuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var deltaBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var seenBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) ) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); @@ -117,7 +112,7 @@ public static void evaluateIntermediate( } public static Block evaluateFinal(GroupingSumState state, IntVector selected, DriverContext driverContext) { - try (DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (DoubleBlock.Builder builder = 
driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int si = selected.getInt(i); if (state.hasValue(si) && si < state.values.size()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st index 42f86580a228d..e81af4841d1a4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st @@ -10,7 +10,6 @@ package org.elasticsearch.compute.aggregation; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.$Type$Array; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; $if(long)$ import org.elasticsearch.compute.data.IntVector; $endif$ @@ -73,14 +72,14 @@ $endif$ Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try ($Type$Vector.Builder builder = $Type$Vector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try ($Type$Vector.Builder builder = driverContext.blockFactory().new$Type$VectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.append$Type$(values.get(selected.getInt(i))); } return builder.build().asBlock(); } } - try ($Type$Block.Builder builder = $Type$Block.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try ($Type$Block.Builder builder = driverContext.blockFactory().new$Type$BlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); if (hasValue(group)) { @@ -111,8 +110,8 @@ $endif$ ) { assert blocks.length >= offset + 2; try ( - var valuesBuilder = $Type$Block.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()); - var hasValueBuilder = BooleanBlock.newBlockBuilder(selected.getPositionCount(), driverContext.blockFactory()) + var valuesBuilder = driverContext.blockFactory().new$Type$BlockBuilder(selected.getPositionCount()); + var hasValueBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount()) ) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); @@ -124,7 +123,7 @@ $endif$ hasValueBuilder.appendBoolean(hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); - blocks[offset + 1] = hasValueBuilder.build(); + blocks[offset + 1] = hasValueBuilder.build().asBlock(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index ce53f0bb8e7f4..49b16198a5d77 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -65,7 +65,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private IntVector add(LongVector vector1, LongVector vector2) { int positions = vector1.getPositionCount(); - try (var builder = 
IntVector.newVectorFixedBuilder(positions, blockFactory)) { + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { builder.appendInt(Math.toIntExact(hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i))))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java index cb53bfa2738e3..8ce6ef9ab78ab 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java @@ -11,8 +11,7 @@ import java.util.BitSet; -abstract class AbstractBlock implements Block { - private int references = 1; +abstract class AbstractBlock extends AbstractNonThreadSafeRefCounted implements Block { private final int positionCount; @Nullable @@ -104,52 +103,4 @@ public void allowPassingToDifferentDriver() { public final boolean isReleased() { return hasReferences() == false; } - - @Override - public final void incRef() { - if (isReleased()) { - throw new IllegalStateException("can't increase refCount on already released block [" + this + "]"); - } - references++; - } - - @Override - public final boolean tryIncRef() { - if (isReleased()) { - return false; - } - references++; - return true; - } - - @Override - public final boolean decRef() { - if (isReleased()) { - throw new IllegalStateException("can't release already released block [" + this + "]"); - } - - references--; - - if (references <= 0) { - closeInternal(); - return true; - } - return false; - } - - @Override - public final boolean hasReferences() { - return references >= 1; - } - - @Override - public final void close() { - decRef(); - } - - /** - * This is called when the number of references reaches zero. - * It must release any resources held by the block (adjusting circuit breakers if needed). - */ - protected abstract void closeInternal(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractNonThreadSafeRefCounted.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractNonThreadSafeRefCounted.java new file mode 100644 index 0000000000000..2dfd8c3eca5ac --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractNonThreadSafeRefCounted.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; + +/** + * Releasable, non-threadsafe version of {@link org.elasticsearch.core.AbstractRefCounted}. + * Calls to {@link AbstractNonThreadSafeRefCounted#decRef()} and {@link AbstractNonThreadSafeRefCounted#close()} are equivalent. 
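The contract described above is easiest to see in a small usage sketch. This is an editor's illustration rather than code from the patch; blockFactory can be any BlockFactory, and the constant block stands in for any ref-counted block or vector.

import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntBlock;

final class RefCountSketch {
    static void share(BlockFactory blockFactory) {
        IntBlock block = blockFactory.newConstantIntBlockWith(42, 10); // refcount starts at 1
        block.incRef();                 // hand a second reference to another single-threaded consumer
        block.close();                  // same as decRef(): 2 -> 1, block still usable
        boolean freed = block.decRef(); // 1 -> 0: closeInternal() runs and the breaker is adjusted
        assert freed && block.isReleased();
        assert block.tryIncRef() == false; // a released object cannot be resurrected
    }
}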
+ */ +abstract class AbstractNonThreadSafeRefCounted implements RefCounted, Releasable { + private int references = 1; + + @Override + public final void incRef() { + if (hasReferences() == false) { + throw new IllegalStateException("can't increase refCount on already released object [" + this + "]"); + } + references++; + } + + @Override + public final boolean tryIncRef() { + if (hasReferences() == false) { + return false; + } + references++; + return true; + } + + @Override + public final boolean decRef() { + if (hasReferences() == false) { + throw new IllegalStateException("can't release already released object [" + this + "]"); + } + + references--; + + if (references <= 0) { + closeInternal(); + return true; + } + return false; + } + + @Override + public final boolean hasReferences() { + return references >= 1; + } + + @Override + public final void close() { + decRef(); + } + + /** + * This is called when the number of references reaches zero. + * This is where resources should be released (adjusting circuit breakers if needed). + */ + protected abstract void closeInternal(); +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java index 33ef14cfb4ad8..cc9727b751411 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java @@ -10,11 +10,10 @@ /** * A dense Vector of single values. */ -abstract class AbstractVector implements Vector { +abstract class AbstractVector extends AbstractNonThreadSafeRefCounted implements Vector { private final int positionCount; private BlockFactory blockFactory; - protected boolean released; protected AbstractVector(int positionCount, BlockFactory blockFactory) { this.positionCount = positionCount; @@ -41,16 +40,12 @@ public void allowPassingToDifferentDriver() { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + protected void closeInternal() { blockFactory.adjustBreaker(-ramBytesUsed(), true); } @Override public final boolean isReleased() { - return released; + return hasReferences() == false; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 964e510de9a20..c89a0ce260c67 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -159,25 +159,6 @@ default boolean mvSortedAscending() { */ Block expand(); - /** - * {@return a constant null block with the given number of positions, using the non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantNullBlock} - */ - // Eventually, this should use the GLOBAL breaking instance - @Deprecated - static Block constantNullBlock(int positions) { - return constantNullBlock(positions, BlockFactory.getNonBreakingInstance()); - } - - /** - * {@return a constant null block with the given number of positions}. 
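The deletions in Block.java here (and the matching ones in the generated sources further down) force every constant-block allocation through an explicit factory. A hedged before/after sketch of the call-site migration, with blockFactory and positions assumed in scope:

import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockFactory;

final class ConstantNullMigration {
    static Block nulls(BlockFactory blockFactory, int positions) {
        // Removed: Block.constantNullBlock(positions) silently used the non-breaking factory.
        // Removed: Block.constantNullBlock(positions, blockFactory) was a thin deprecated wrapper.
        return blockFactory.newConstantNullBlock(positions); // the one remaining entry point
    }
}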
- * @deprecated use {@link BlockFactory#newConstantNullBlock} - */ - @Deprecated - static Block constantNullBlock(int positions, BlockFactory blockFactory) { - return blockFactory.newConstantNullBlock(positions); - } - /** * Builds {@link Block}s. Typically, you use one of it's direct supinterfaces like {@link IntBlock.Builder}. * This is {@link Releasable} and should be released after building the block or if building the block fails. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 595c9bdf45ee3..dccbb03c0e48e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; -import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; @@ -28,11 +27,6 @@ public class BlockFactory { public static final String MAX_BLOCK_PRIMITIVE_ARRAY_SIZE_SETTING = "esql.block_factory.max_block_primitive_array_size"; public static final ByteSizeValue DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE = ByteSizeValue.ofKb(512); - private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( - new NoopCircuitBreaker("noop-esql-breaker"), - BigArrays.NON_RECYCLING_INSTANCE - ); - private final CircuitBreaker breaker; private final BigArrays bigArrays; @@ -54,13 +48,6 @@ public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue m this.maxPrimitiveArrayBytes = maxPrimitiveArraySize.getBytes(); } - /** - * Returns the Non-Breaking block factory. 
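With getNonBreakingInstance() gone, callers that deliberately want no circuit breaking (benchmarks, throwaway test contexts) must now construct such a factory explicitly, as ThrowingDriverContext does later in this patch. A sketch of the replacement; the breaker label "noop" is arbitrary:

import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;

final class NonBreakingFactorySketch {
    // Equivalent to the removed BlockFactory.getNonBreakingInstance(), but visible at the call site.
    static BlockFactory newNonBreakingFactory() {
        return BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE);
    }
}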
- */ - public static BlockFactory getNonBreakingInstance() { - return NON_BREAKING; - } - public static BlockFactory getInstance(CircuitBreaker breaker, BigArrays bigArrays) { return new BlockFactory(breaker, bigArrays, DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE, null); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 063444908f2d9..6bc598b2f0e82 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -232,7 +232,7 @@ private static BytesRef spatialToBytesRef(Object val) { public static Block constantBlock(BlockFactory blockFactory, Object val, int size) { if (val == null) { - return Block.constantNullBlock(size); + return blockFactory.newConstantNullBlock(size); } return constantBlock(blockFactory, fromJava(val.getClass()), val, size); } @@ -240,12 +240,12 @@ public static Block constantBlock(BlockFactory blockFactory, Object val, int siz // TODO: allow null values private static Block constantBlock(BlockFactory blockFactory, ElementType type, Object val, int size) { return switch (type) { - case NULL -> Block.constantNullBlock(size); - case LONG -> LongBlock.newConstantBlockWith((long) val, size, blockFactory); - case INT -> IntBlock.newConstantBlockWith((int) val, size, blockFactory); - case BYTES_REF -> BytesRefBlock.newConstantBlockWith(spatialToBytesRef(val), size, blockFactory); - case DOUBLE -> DoubleBlock.newConstantBlockWith((double) val, size, blockFactory); - case BOOLEAN -> BooleanBlock.newConstantBlockWith((boolean) val, size, blockFactory); + case NULL -> blockFactory.newConstantNullBlock(size); + case LONG -> blockFactory.newConstantLongBlockWith((long) val, size); + case INT -> blockFactory.newConstantIntBlockWith((int) val, size); + case BYTES_REF -> blockFactory.newConstantBytesRefBlockWith(spatialToBytesRef(val), size); + case DOUBLE -> blockFactory.newConstantDoubleBlockWith((double) val, size); + case BOOLEAN -> blockFactory.newConstantBooleanBlockWith((boolean) val, size); default -> throw new UnsupportedOperationException("unsupported element type [" + type + "]"); }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 29e39f43cddc2..639e1c298291f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -19,15 +19,10 @@ /** * Block implementation representing a constant null value. 
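BlockUtils.constantBlock now routes every constant through the factory methods shown in the switch above. A small usage sketch (editor's illustration; the values are arbitrary): the element type is derived from the Java class of the value, and a null value yields a constant-null block.

import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.BlockUtils;

final class ConstantBlockUsage {
    static void demo(BlockFactory blockFactory) {
        try (Block longs = BlockUtils.constantBlock(blockFactory, 42L, 8);    // LONG constant block
             Block nulls = BlockUtils.constantBlock(blockFactory, null, 8)) { // constant-null block
            assert longs.getPositionCount() == 8;
            assert nulls.getPositionCount() == 8;
        } // closing the blocks returns their bytes to the factory's breaker
    }
}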
*/ -public final class ConstantNullBlock extends AbstractBlock implements BooleanBlock, IntBlock, LongBlock, DoubleBlock, BytesRefBlock { +final class ConstantNullBlock extends AbstractBlock implements BooleanBlock, IntBlock, LongBlock, DoubleBlock, BytesRefBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantNullBlock.class); - // Eventually, this should use the GLOBAL breaking instance - ConstantNullBlock(int positionCount) { - this(positionCount, BlockFactory.getNonBreakingInstance()); - } - ConstantNullBlock(int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); } @@ -83,8 +78,9 @@ public String getWriteableName() { return "ConstantNullBlock"; } - static ConstantNullBlock of(StreamInput in) throws IOException { - return new ConstantNullBlock(in.readVInt()); + static Block of(StreamInput in) throws IOException { + BlockFactory blockFactory = ((BlockStreamInput) in).blockFactory(); + return blockFactory.newConstantNullBlock(in.readVInt()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index d843bcdc2493a..f8e3428d6fee7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -79,8 +79,8 @@ public void closeInternal() { /** * A builder the for {@link DocBlock}. */ - public static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return new Builder(estimatedSize, blockFactory); + public static Builder newBlockBuilder(BlockFactory blockFactory, int estimatedSize) { + return new Builder(blockFactory, estimatedSize); } public static class Builder implements Block.Builder { @@ -88,10 +88,10 @@ public static class Builder implements Block.Builder { private final IntVector.Builder segments; private final IntVector.Builder docs; - private Builder(int estimatedSize, BlockFactory blockFactory) { - shards = IntVector.newVectorBuilder(estimatedSize, blockFactory); - segments = IntVector.newVectorBuilder(estimatedSize, blockFactory); - docs = IntVector.newVectorBuilder(estimatedSize, blockFactory); + private Builder(BlockFactory blockFactory, int estimatedSize) { + shards = blockFactory.newIntVectorBuilder(estimatedSize); + segments = blockFactory.newIntVectorBuilder(estimatedSize); + docs = blockFactory.newIntVectorBuilder(estimatedSize); } public Builder appendShard(int shard) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 427ddd9219394..5f365bfc3eae1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -173,7 +173,21 @@ public DocBlock asBlock() { @Override public DocVector filter(int... 
positions) { - return new DocVector(shards.filter(positions), segments.filter(positions), docs.filter(positions), null); + IntVector filteredShards = null; + IntVector filteredSegments = null; + IntVector filteredDocs = null; + DocVector result = null; + try { + filteredShards = shards.filter(positions); + filteredSegments = segments.filter(positions); + filteredDocs = docs.filter(positions); + result = new DocVector(filteredShards, filteredSegments, filteredDocs, null); + return result; + } finally { + if (result == null) { + Releasables.closeExpectNoException(filteredShards, filteredSegments, filteredDocs); + } + } } @Override @@ -228,8 +242,7 @@ public void allowPassingToDifferentDriver() { } @Override - public void close() { - released = true; - Releasables.closeExpectNoException(shards.asBlock(), segments.asBlock(), docs.asBlock()); // Ugh! we always close blocks + public void closeInternal() { + Releasables.closeExpectNoException(shards, segments, docs); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index 42fe73c3ed82d..a566e8930949a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -15,16 +15,16 @@ * The type of elements in {@link Block} and {@link Vector} */ public enum ElementType { - BOOLEAN(BooleanBlock::newBlockBuilder), - INT(IntBlock::newBlockBuilder), - LONG(LongBlock::newBlockBuilder), - DOUBLE(DoubleBlock::newBlockBuilder), + BOOLEAN(BlockFactory::newBooleanBlockBuilder), + INT(BlockFactory::newIntBlockBuilder), + LONG(BlockFactory::newLongBlockBuilder), + DOUBLE(BlockFactory::newDoubleBlockBuilder), /** * Blocks containing only null values. */ - NULL((estimatedSize, blockFactory) -> new ConstantNullBlock.Builder(blockFactory)), + NULL((blockFactory, estimatedSize) -> new ConstantNullBlock.Builder(blockFactory)), - BYTES_REF(BytesRefBlock::newBlockBuilder), + BYTES_REF(BlockFactory::newBytesRefBlockBuilder), /** * Blocks that reference individual lucene documents. @@ -34,10 +34,10 @@ public enum ElementType { /** * Intermediate blocks which don't support retrieving elements. */ - UNKNOWN((estimatedSize, blockFactory) -> { throw new UnsupportedOperationException("can't build null blocks"); }); + UNKNOWN((blockFactory, estimatedSize) -> { throw new UnsupportedOperationException("can't build null blocks"); }); - interface BuilderSupplier { - Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory); + private interface BuilderSupplier { + Block.Builder newBlockBuilder(BlockFactory blockFactory, int estimatedSize); } private final BuilderSupplier builder; @@ -46,20 +46,11 @@ interface BuilderSupplier { this.builder = builder; } - /** - * Create a new {@link Block.Builder} for blocks of this type. - * @deprecated use {@link #newBlockBuilder(int, BlockFactory)} - */ - @Deprecated - public Block.Builder newBlockBuilder(int estimatedSize) { - return builder.newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Create a new {@link Block.Builder} for blocks of this type. 
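The DocVector#filter rewrite earlier in this hunk replaces a one-liner that leaked on partial failure with a release-on-failure pattern. A generic sketch of the same shape (the helper and its arguments are hypothetical): build every intermediate, and release whatever was built if the result never materialized.

import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.core.Releasables;

final class ReleaseOnFailureSketch {
    static IntVector[] filterPair(IntVector a, IntVector b, int... positions) {
        IntVector fa = null;
        IntVector fb = null;
        IntVector[] result = null;
        try {
            fa = a.filter(positions);
            fb = b.filter(positions);
            result = new IntVector[] { fa, fb };
            return result;
        } finally {
            if (result == null) {
                // Tolerates nulls, so a partially built state is safe to pass in.
                Releasables.closeExpectNoException(fa, fb);
            }
        }
    }
}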
*/ public Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return builder.newBlockBuilder(estimatedSize, blockFactory); + return builder.newBlockBuilder(blockFactory, estimatedSize); } public static ElementType fromJava(Class type) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 1d8c0d35b4e7d..fc09f636ac700 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -8,12 +8,13 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; /** * A dense Vector of single values. */ -public interface Vector extends Accountable, Releasable { +public interface Vector extends Accountable, RefCounted, Releasable { /** * {@return Returns a new Block containing this vector.} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 778ebab5a44c8..01a6d70d63795 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -15,25 +15,23 @@ import org.elasticsearch.core.Releasables; $else$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; $endif$ import java.util.BitSet; /** - * Block implementation that stores an array of $type$. + * Block implementation that stores values in a {@link $Type$ArrayVector}. +$if(BytesRef)$ + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. +$endif$ * This class is generated. Do not edit it. 
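Because ElementType now threads the factory through its BuilderSupplier, type-generic code can allocate a builder without switching on concrete block classes. A sketch of a generic copy helper (editor's illustration; it assumes Block#elementType() and Block.Builder#copyFrom as in the surrounding codebase):

import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.ElementType;

final class GenericCopySketch {
    static Block copyOf(Block source, BlockFactory blockFactory) {
        ElementType type = source.elementType();
        try (Block.Builder builder = type.newBlockBuilder(source.getPositionCount(), blockFactory)) {
            builder.copyFrom(source, 0, source.getPositionCount()); // copies values, nulls and multi-values
            return builder.build();
        } // the builder is released here; the built block lives on with its own refcount
    }
}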
*/ final class $Type$ArrayBlock extends AbstractArrayBlock implements $Type$Block { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$ArrayBlock.class); -$if(BytesRef)$ - private final BytesRefArray values; - -$else$ - private final $type$[] values; -$endif$ + private final $Type$ArrayVector vector; $Type$ArrayBlock( $if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, @@ -44,7 +42,11 @@ $endif$ BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + $if(BytesRef)$ + this.vector = new BytesRefArrayVector(values, (int) values.size(), blockFactory); + $else$ + this.vector = new $Type$ArrayVector(values, values.length, blockFactory); + $endif$ } @Override @@ -55,10 +57,10 @@ $endif$ @Override $if(BytesRef)$ public BytesRef getBytesRef(int valueIndex, BytesRef dest) { - return values.get(valueIndex, dest); + return vector.getBytesRef(valueIndex, dest); $else$ public $type$ get$Type$(int valueIndex) { - return values[valueIndex]; + return vector.get$Type$(valueIndex); $endif$ } @@ -100,7 +102,7 @@ $endif$ incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector $if(BytesRef)$ final BytesRef scratch = new BytesRef(); $endif$ @@ -124,14 +126,13 @@ $endif$ } } - public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -154,23 +155,20 @@ $endif$ + getPositionCount() + ", mvOrdering=" + mvOrdering() -$if(BytesRef)$ - + ", values=" - + values.size() -$else$ - + ", values=" - + Arrays.toString(values) -$endif$ + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - $if(BytesRef)$ - blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); - Releasables.closeExpectNoException(values); - $else$ - blockFactory().adjustBreaker(-ramBytesUsed(), true); - $endif$ + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 04afc9de91647..2608816f91f19 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -21,6 +21,9 @@ $endif$ /** * Vector implementation that stores an array of $type$ values. +$if(BytesRef)$ + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. +$endif$ * This class is generated. Do not edit it. 
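The $Type$ArrayBlock rewrite above changes the ownership model: the block keeps only its own overhead (first-value indexes, null mask) on the breaker and delegates value storage, accounting, and release to an inner vector. A skeleton of that split with hypothetical names; adjustBreaker stands in for the factory call the real template makes:

import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Releasables;

final class DelegatingBlockSketch implements Releasable {
    interface AccountedVector extends Releasable {
        long ramBytesUsed();
    }

    private final long blockOverheadBytes; // shallow size + firstValueIndexes + nulls mask
    private final AccountedVector vector;  // owns the values and their breaker bytes

    DelegatingBlockSketch(long blockOverheadBytes, AccountedVector vector) {
        this.blockOverheadBytes = blockOverheadBytes;
        this.vector = vector;
    }

    long ramBytesUsed() {
        return blockOverheadBytes + vector.ramBytesUsed(); // block overhead plus delegated values
    }

    @Override
    public void close() {
        adjustBreaker(-blockOverheadBytes);         // give back only the block's own overhead
        Releasables.closeExpectNoException(vector); // the vector returns the value bytes itself
    }

    private void adjustBreaker(long bytes) {
        // Placeholder for blockFactory().adjustBreaker(bytes, true) in the real template.
    }
}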
*/ final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { @@ -117,11 +120,9 @@ $endif$ $if(BytesRef)$ @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link BytesRefArray} is adjusted outside + // of this class. blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st index 4fe6ce9304a68..4ffce6b66fc10 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st @@ -14,13 +14,14 @@ import org.elasticsearch.core.Releasables; import java.util.BitSet; /** - * Block implementation that stores values in a $Type$Array. + * Block implementation that stores values in a {@link $Type$BigArrayVector}. Does not take ownership of the given + * {@link $Array$} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Type$Block { private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this - private final $Array$ values; + private final $Type$BigArrayVector vector; public $Type$BigArrayBlock( $Array$ values, @@ -31,7 +32,7 @@ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Ty BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new $Type$BigArrayVector(values, (int) values.size(), blockFactory); } @Override @@ -41,7 +42,7 @@ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Ty @Override public $type$ get$Type$(int valueIndex) { - return values.get(valueIndex); + return vector.get$Type$(valueIndex); } @Override @@ -79,7 +80,7 @@ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Ty incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().new$Type$BlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -96,10 +97,13 @@ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Ty } } + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); } @Override @@ -123,13 +127,19 @@ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Ty + ", mvOrdering=" + mvOrdering() + ", ramBytesUsed=" - + values.ramBytesUsed() + + vector.ramBytesUsed() + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + 
vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed() + RamUsageEstimator.sizeOf(values), true); - Releasables.closeExpectNoException(values); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 487078389c24b..addca35643dd6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -12,7 +12,8 @@ import org.elasticsearch.common.util.$Array$; import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed $Type$Array. + * Vector implementation that defers to an enclosed {@link $if(boolean)$Bit$else$$Type$$endif$Array}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class $Type$BigArrayVector extends AbstractVector implements $Type$Vector, Releasable { @@ -74,11 +75,9 @@ public final class $Type$BigArrayVector extends AbstractVector implements $Type$ } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link $if(boolean)$Bit$else$$Type$$endif$Array} is adjusted outside + // of this class. values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index eddc876f8536e..c5fd7e8302776 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -202,44 +202,6 @@ $endif$ return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#new$Type$BlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a builder. - * @deprecated use {@link BlockFactory#new$Type$BlockBuilder} - */ - @Deprecated - static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.new$Type$BlockBuilder(estimatedSize); - } - - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstant$Type$BlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static $Type$Block newConstantBlockWith($type$ value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - - /** - * Returns a constant block. 
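The big-array block and vector comments above both stress that they do not take ownership of the array's breaker accounting: the backing BigArray is already tracked by the BigArrays that created it. A sketch, assuming the generated long variant and the constructor visible in the block template above:

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.LongBigArrayVector;

final class BigArrayVectorSketch {
    static LongBigArrayVector wrap(BigArrays bigArrays, BlockFactory blockFactory, int positions) {
        LongArray values = bigArrays.newLongArray(positions, false); // breaker-tracked by bigArrays
        for (int i = 0; i < positions; i++) {
            values.set(i, i);
        }
        // The vector closes the array when its refcount hits zero, but never re-counts its bytes.
        return new LongBigArrayVector(values, positions, blockFactory);
    }
}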
- * @deprecated use {@link BlockFactory#newConstant$Type$BlockWith} - */ - @Deprecated - static $Type$Block newConstantBlockWith($type$ value, int positions, BlockFactory blockFactory) { - return blockFactory.newConstant$Type$BlockWith(value, positions); - } - /** * Builder for {@link $Type$Block} */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index ac3051a96b63b..0134eb88301a0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -97,13 +97,4 @@ $endif$ public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 6ec41ccdc6ab9..c303a8391ad18 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -143,46 +143,6 @@ $endif$ } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - -$if(BytesRef)$ - /** - * Creates a builder that grows as needed. - * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} - */ -$else$ - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} - */ -$endif$ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.new$Type$VectorBuilder(estimatedSize); - } - -$if(BytesRef)$ -$else$ - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#new$Type$VectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.new$Type$VectorFixedBuilder(size); - } -$endif$ - $if(int)$ /** Create a vector for a range of ints. */ static IntVector range(int startInclusive, int endExclusive, BlockFactory blockFactory) { @@ -197,7 +157,11 @@ $endif$ /** * A builder that grows as needed. */ +$if(BytesRef)$ sealed interface Builder extends Vector.Builder permits $Type$VectorBuilder { +$else$ + sealed interface Builder extends Vector.Builder permits $Type$VectorBuilder, FixedBuilder { +$endif$ /** * Appends a $type$ to the current entry. */ @@ -212,14 +176,12 @@ $else$ /** * A builder that never grows. 
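FixedBuilder now extends Builder (see the sealed-interface change above), so a call site can switch to the faster fixed-size variant without changing its declared type, exactly as LuceneTopNSourceOperator does later in this patch. A sketch with arbitrary contents:

import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntVector;

final class FixedBuilderSketch {
    static IntVector squares(BlockFactory blockFactory, int size) {
        // The size is known up front, so prefer the fixed builder; it still satisfies IntVector.Builder.
        try (IntVector.Builder builder = blockFactory.newIntVectorFixedBuilder(size)) {
            for (int i = 0; i < size; i++) {
                builder.appendInt(i * i);
            }
            return builder.build();
        }
    }
}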
*/ - sealed interface FixedBuilder extends Vector.Builder permits $Type$VectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits $Type$VectorFixedBuilder { /** * Appends a $type$ to the current entry. */ - FixedBuilder append$Type$($type$ value); - @Override - $Type$Vector build(); + FixedBuilder append$Type$($type$ value); } $endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index 8269f8f8e57ee..4bc3c66b65743 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -13,7 +13,7 @@ $endif$ import org.elasticsearch.core.Releasables; /** - * Block view of a $Type$Vector. + * Block view of a {@link $Type$Vector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class $Type$VectorBlock extends AbstractVectorBlock implements $Type$Block { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index 75bd230638928..4ed32d6552497 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -160,8 +160,8 @@ public Page getOutput() { LongBlock count = null; BooleanBlock seen = null; try { - count = LongBlock.newConstantBlockWith(totalHits, PAGE_SIZE, blockFactory); - seen = BooleanBlock.newConstantBlockWith(true, PAGE_SIZE, blockFactory); + count = blockFactory.newConstantLongBlockWith(totalHits, PAGE_SIZE); + seen = blockFactory.newConstantBooleanBlockWith(true, PAGE_SIZE); page = new Page(PAGE_SIZE, count, seen); } finally { if (page == null) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 6536b08cd2419..21b2a4cfaeb0b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -30,8 +31,13 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.HashSet; import java.util.Objects; +import java.util.Set; +import java.util.TreeSet; import java.util.function.Function; +import java.util.stream.Collectors; public abstract class LuceneOperator extends SourceOperator { private static final Logger logger = LogManager.getLogger(LuceneOperator.class); @@ -40,10 +46,16 @@ public abstract class LuceneOperator extends SourceOperator { protected final BlockFactory blockFactory; - private int processSlices; + /** + * Count of the number of slices processed. 
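LuceneCountOperator#getOutput above shows the standard shape for assembling a page from freshly allocated blocks: keep every reference nullable, and release the pieces if the page was never constructed. A standalone sketch of that shape (editor's illustration):

import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.core.Releasables;

final class CountPageSketch {
    static Page countPage(BlockFactory blockFactory, long totalHits, int pageSize) {
        Page page = null;
        LongBlock count = null;
        BooleanBlock seen = null;
        try {
            count = blockFactory.newConstantLongBlockWith(totalHits, pageSize);
            seen = blockFactory.newConstantBooleanBlockWith(true, pageSize);
            page = new Page(pageSize, count, seen); // the page now owns the blocks
            return page;
        } finally {
            if (page == null) {
                Releasables.closeExpectNoException(count, seen); // release partial allocations
            }
        }
    }
}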
+ */ + private int processedSlices; final int maxPageSize; private final LuceneSliceQueue sliceQueue; + private final Set processedQueries = new HashSet<>(); + private final Set processedShards = new HashSet<>(); + private LuceneSlice currentSlice; private int sliceIndex; @@ -52,7 +64,7 @@ public abstract class LuceneOperator extends SourceOperator { int pagesEmitted; boolean doneCollecting; - public LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue) { + protected LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue) { this.blockFactory = blockFactory; this.maxPageSize = maxPageSize; this.sliceQueue = sliceQueue; @@ -73,18 +85,23 @@ LuceneScorer getCurrentOrLoadNextScorer() { if (currentSlice == null) { doneCollecting = true; return null; - } else { - processSlices++; } if (currentSlice.numLeaves() == 0) { continue; } + processedSlices++; + processedShards.add( + currentSlice.searchContext().getSearchExecutionContext().getFullyQualifiedIndex().getName() + + ":" + + currentSlice.searchContext().getSearchExecutionContext().getShardId() + ); } final PartialLeafReaderContext partialLeaf = currentSlice.getLeaf(sliceIndex++); logger.trace("Starting {}", partialLeaf); final LeafReaderContext leaf = partialLeaf.leafReaderContext(); if (currentScorer == null || currentScorer.leafReaderContext() != leaf) { final Weight weight = currentSlice.weight().get(); + processedQueries.add(weight.getQuery()); currentScorer = new LuceneScorer(currentSlice.shardIndex(), currentSlice.searchContext(), weight, leaf); } assert currentScorer.maxPosition <= partialLeaf.maxDoc() : currentScorer.maxPosition + ">" + partialLeaf.maxDoc(); @@ -190,6 +207,8 @@ public static class Status implements Operator.Status { ); private final int processedSlices; + private final Set processedQueries; + private final Set processedShards; private final int totalSlices; private final int pagesEmitted; private final int sliceIndex; @@ -198,7 +217,9 @@ public static class Status implements Operator.Status { private final int current; private Status(LuceneOperator operator) { - processedSlices = operator.processSlices; + processedSlices = operator.processedSlices; + processedQueries = operator.processedQueries.stream().map(Query::toString).collect(Collectors.toCollection(TreeSet::new)); + processedShards = new TreeSet<>(operator.processedShards); sliceIndex = operator.sliceIndex; totalSlices = operator.sliceQueue.totalSlices(); LuceneSlice slice = operator.currentSlice; @@ -219,8 +240,20 @@ private Status(LuceneOperator operator) { pagesEmitted = operator.pagesEmitted; } - Status(int processedSlices, int sliceIndex, int totalSlices, int pagesEmitted, int sliceMin, int sliceMax, int current) { + Status( + int processedSlices, + Set processedQueries, + Set processedShards, + int sliceIndex, + int totalSlices, + int pagesEmitted, + int sliceMin, + int sliceMax, + int current + ) { this.processedSlices = processedSlices; + this.processedQueries = processedQueries; + this.processedShards = processedShards; this.sliceIndex = sliceIndex; this.totalSlices = totalSlices; this.pagesEmitted = pagesEmitted; @@ -231,6 +264,13 @@ private Status(LuceneOperator operator) { Status(StreamInput in) throws IOException { processedSlices = in.readVInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + processedQueries = in.readCollectionAsSet(StreamInput::readString); + processedShards = in.readCollectionAsSet(StreamInput::readString); + } 
else { + processedQueries = Collections.emptySet(); + processedShards = Collections.emptySet(); + } sliceIndex = in.readVInt(); totalSlices = in.readVInt(); pagesEmitted = in.readVInt(); @@ -242,6 +282,10 @@ private Status(LuceneOperator operator) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(processedSlices); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + out.writeCollection(processedQueries, StreamOutput::writeString); + out.writeCollection(processedShards, StreamOutput::writeString); + } out.writeVInt(sliceIndex); out.writeVInt(totalSlices); out.writeVInt(pagesEmitted); @@ -259,6 +303,14 @@ public int processedSlices() { return processedSlices; } + public Set processedQueries() { + return processedQueries; + } + + public Set processedShards() { + return processedShards; + } + public int sliceIndex() { return sliceIndex; } @@ -287,6 +339,8 @@ public int current() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("processed_slices", processedSlices); + builder.field("processed_queries", processedQueries); + builder.field("processed_shards", processedShards); builder.field("slice_index", sliceIndex); builder.field("total_slices", totalSlices); builder.field("pages_emitted", pagesEmitted); @@ -302,6 +356,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; return processedSlices == status.processedSlices + && processedQueries.equals(status.processedQueries) + && processedShards.equals(status.processedShards) && sliceIndex == status.sliceIndex && totalSlices == status.totalSlices && pagesEmitted == status.pagesEmitted diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 7b2b276a619c6..b636e4aba8a5e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -95,7 +95,7 @@ public LuceneSourceOperator(BlockFactory blockFactory, int maxPageSize, LuceneSl super(blockFactory, maxPageSize, sliceQueue); this.minPageSize = Math.max(1, maxPageSize / 2); this.remainingDocs = limit; - this.docsBuilder = IntVector.newVectorBuilder(Math.min(limit, maxPageSize), blockFactory); + this.docsBuilder = blockFactory.newIntVectorBuilder(Math.min(limit, maxPageSize)); this.leafCollector = new LeafCollector() { @Override public void setScorer(Scorable scorer) { @@ -149,10 +149,10 @@ public Page getOutput() { IntBlock leaf = null; IntVector docs = null; try { - shard = IntBlock.newConstantBlockWith(scorer.shardIndex(), currentPagePos, blockFactory); - leaf = IntBlock.newConstantBlockWith(scorer.leafReaderContext().ord, currentPagePos, blockFactory); + shard = blockFactory.newConstantIntBlockWith(scorer.shardIndex(), currentPagePos); + leaf = blockFactory.newConstantIntBlockWith(scorer.leafReaderContext().ord, currentPagePos); docs = docsBuilder.build(); - docsBuilder = IntVector.newVectorBuilder(Math.min(remainingDocs, maxPageSize), blockFactory); + docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); page = new Page(currentPagePos, new DocVector(shard.asVector(), leaf.asVector(), docs, true).asBlock()); } finally 
{ if (page == null) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 9624fa48ef20d..7f08c8ca66821 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -200,8 +200,8 @@ private Page emit(boolean startEmitting) { IntVector docs = null; Page page = null; try ( - IntVector.Builder currentSegmentBuilder = IntVector.newVectorBuilder(size, blockFactory); - IntVector.Builder currentDocsBuilder = IntVector.newVectorBuilder(size, blockFactory) + IntVector.Builder currentSegmentBuilder = blockFactory.newIntVectorFixedBuilder(size); + IntVector.Builder currentDocsBuilder = blockFactory.newIntVectorFixedBuilder(size) ) { int start = offset; offset += size; @@ -213,7 +213,7 @@ private Page emit(boolean startEmitting) { currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment } - shard = IntBlock.newConstantBlockWith(perShardCollector.shardIndex, size, blockFactory); + shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardIndex, size); segments = currentSegmentBuilder.build(); docs = currentDocsBuilder.build(); page = new Page(size, new DocVector(shard.asVector(), segments, docs, null).asBlock()); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TextValueSource.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TextValueSource.java deleted file mode 100644 index 04dbcd91c18c8..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TextValueSource.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.LeafFieldData; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.script.field.TextDocValuesField; -import org.elasticsearch.search.aggregations.support.ValuesSource; - -public class TextValueSource extends ValuesSource.Bytes { - - private final IndexFieldData indexFieldData; - - public TextValueSource(IndexFieldData indexFieldData) { - this.indexFieldData = indexFieldData; - } - - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext leafReaderContext) { - String fieldName = indexFieldData.getFieldName(); - LeafFieldData fieldData = indexFieldData.load(leafReaderContext); - return ((TextDocValuesFieldWrapper) fieldData.getScriptFieldFactory(fieldName)).bytesValues(); - } - - /** Wrapper around TextDocValuesField that provides access to the SortedBinaryDocValues. 
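LuceneTopNSourceOperator#emit above converts the global doc ids collected by the top-N collector into per-segment ids by subtracting the leaf's docBase. A standalone sketch of that arithmetic; the use of Lucene's ReaderUtil.subIndex to locate the segment is the editor's addition, not code from the patch:

import java.util.List;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;

final class DocBaseSketch {
    // Map a global doc id to (segment ordinal, doc id within that segment).
    static int[] toSegmentLocal(List<LeafReaderContext> leaves, int globalDoc) {
        int segment = ReaderUtil.subIndex(globalDoc, leaves); // binary search over docBase boundaries
        int localDoc = globalDoc - leaves.get(segment).docBase;
        return new int[] { segment, localDoc };
    }
}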
*/ - static final class TextDocValuesFieldWrapper extends TextDocValuesField { - TextDocValuesFieldWrapper(SortedBinaryDocValues input, String name) { - super(input, name); - } - - SortedBinaryDocValues bytesValues() { - return input; - } - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index 2a6a3c9b6210b..10f23ed29094f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -75,27 +75,22 @@ interface Factory { public static final ExpressionEvaluator.Factory CONSTANT_NULL_FACTORY = new ExpressionEvaluator.Factory() { @Override public ExpressionEvaluator get(DriverContext driverContext) { - return CONSTANT_NULL; - } + return new ExpressionEvaluator() { + @Override + public Block eval(Page page) { + return driverContext.blockFactory().newConstantNullBlock(page.getPositionCount()); + } - @Override - public String toString() { - return CONSTANT_NULL.toString(); - } - }; + @Override + public void close() { - public static final ExpressionEvaluator CONSTANT_NULL = new ExpressionEvaluator() { - @Override - public Block eval(Page page) { - return Block.constantNullBlock(page.getPositionCount()); + } + }; } @Override public String toString() { return "ConstantNull"; } - - @Override - public void close() {} }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java index f3570bf7b853b..d6a908306e2f4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java @@ -46,7 +46,7 @@ public BooleanBlock dedupeToBlock(BlockFactory blockFactory) { block.incRef(); return block; } - try (BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 4fb90ddb57e25..a895525add46f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -475,7 +475,7 @@ private static class ValuesAggregator implements Releasable { DriverContext driverContext ) { this.extractor = new ValuesSourceReaderOperator( - BlockFactory.getNonBreakingInstance(), + driverContext.blockFactory(), List.of(new ValuesSourceReaderOperator.FieldInfo(groupingField, blockLoaders)), shardContexts, docChannel diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java index ff124021ea3ad..4b4379eb6a4d8 
100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java @@ -22,8 +22,7 @@ public record RowOperatorFactory(List objects) implements SourceOperator @Override public SourceOperator get(DriverContext driverContext) { - // We aren't yet ready to use the read block factory - return new RowOperator(BlockFactory.getNonBreakingInstance(), objects); + return new RowOperator(driverContext.blockFactory(), objects); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java index 4ffa530bc5d3a..ec61408954219 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java @@ -65,7 +65,7 @@ protected Page process(Page page) { BytesRefBlock.Builder[] blockBuilders = new BytesRefBlock.Builder[fieldNames.length]; try { for (int i = 0; i < fieldNames.length; i++) { - blockBuilders[i] = BytesRefBlock.newBlockBuilder(rowsCount, driverContext.blockFactory()); + blockBuilders[i] = driverContext.blockFactory().newBytesRefBlockBuilder(rowsCount); } try (BytesRefBlock input = (BytesRefBlock) inputEvaluator.eval(page)) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java index 313ec0b682602..c1029db4c32e4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.DoubleArray; @@ -18,7 +19,7 @@ public class ThrowingDriverContext extends DriverContext { public ThrowingDriverContext() { - super(new ThrowingBigArrays(), BlockFactory.getNonBreakingInstance()); + super(new ThrowingBigArrays(), BlockFactory.getInstance(new NoopCircuitBreaker("throwing-context"), new ThrowingBigArrays())); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st index d4d08d22f3eb9..d55f1c4cb43ec 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st @@ -71,7 +71,7 @@ $endif$ block.incRef(); return block; } - try ($Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -125,7 +125,7 @@ $endif$ block.incRef(); return block; } - try ($Type$Block.Builder builder = 
$Type$Block.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); @@ -159,7 +159,7 @@ $endif$ block.incRef(); return block; } - try ($Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount(), blockFactory)) { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { int count = block.getValueCount(p); int first = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index ab9582b20d4aa..3173b716467be 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ChannelActionListener; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.io.stream.StreamInput; @@ -25,10 +24,9 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; @@ -43,7 +41,7 @@ /** * {@link ExchangeService} is responsible for exchanging pages between exchange sinks and sources on the same or different nodes. - * It holds a map of {@link ExchangeSourceHandler} and {@link ExchangeSinkHandler} instances for each node in the cluster. + * It holds a map of {@link ExchangeSinkHandler} instances for each node in the cluster to serve {@link ExchangeRequest}s. * To connect exchange sources to exchange sinks, use the {@link ExchangeSourceHandler#addRemoteSink(RemoteSink, int)} method. */ public final class ExchangeService extends AbstractLifecycleComponent { @@ -66,7 +64,6 @@ public final class ExchangeService extends AbstractLifecycleComponent { private final BlockFactory blockFactory; private final Map<String, ExchangeSinkHandler> sinks = ConcurrentCollections.newConcurrentMap(); - private final Map<String, ExchangeSourceHandler> sources = ConcurrentCollections.newConcurrentMap(); private final InactiveSinksReaper inactiveSinksReaper; @@ -125,35 +122,22 @@ public void finishSinkHandler(String exchangeId, Exception failure) { } } - /** - * Creates an {@link ExchangeSourceHandler} for the specified exchange id.
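// A rough sketch of how a caller wires an exchange with this API; the names
// maxBufferSize, fetchExecutor, sessionId, exchangeBuffer, responseExecutor,
// connection, parentTask, instances, and listener are assumptions for the example,
// not taken from this diff. The caller now builds the ExchangeSourceHandler itself
// (the service no longer tracks sources), opens the remote exchange over a concrete
// Transport.Connection, and then attaches a remote sink to its source handler.
ExchangeSourceHandler sources = new ExchangeSourceHandler(maxBufferSize, threadPool.executor(fetchExecutor));
ExchangeService.openExchange(transportService, connection, sessionId, exchangeBuffer, responseExecutor, ActionListener.wrap(unused -> {
    RemoteSink remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, connection);
    sources.addRemoteSink(remoteSink, instances); // instances: number of concurrent fetches from this sink
}, listener::onFailure));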
- * - * @throws IllegalStateException if a source handler for the given id already exists - */ - public ExchangeSourceHandler createSourceHandler(String exchangeId, int maxBufferSize, String fetchExecutor) { - ExchangeSourceHandler sourceHandler = new ExchangeSourceHandler(maxBufferSize, threadPool.executor(fetchExecutor)); - if (sources.putIfAbsent(exchangeId, sourceHandler) != null) { - throw new IllegalStateException("source exchanger for id [" + exchangeId + "] already exists"); - } - sourceHandler.addCompletionListener(ActionListener.releasing(() -> sources.remove(exchangeId))); - return sourceHandler; - } - /** * Opens a remote sink handler on the remote node for the given session ID. */ public static void openExchange( TransportService transportService, - DiscoveryNode targetNode, + Transport.Connection connection, String sessionId, int exchangeBuffer, Executor responseExecutor, ActionListener listener ) { transportService.sendRequest( - targetNode, + connection, OPEN_EXCHANGE_ACTION_NAME, new OpenExchangeRequest(sessionId, exchangeBuffer), + TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener.map(unused -> null), in -> TransportResponse.Empty.INSTANCE, responseExecutor) ); } @@ -198,10 +182,6 @@ public void messageReceived(ExchangeRequest request, TransportChannel channel, T if (sinkHandler == null) { listener.onResponse(new ExchangeResponse(null, true)); } else { - // the data-node request hasn't arrived yet; use the task framework to cancel the request if needed. - if (sinkHandler.hasData() == false) { - ((CancellableTask) task).addListener(() -> sinkHandler.onFailure(new TaskCancelledException("task cancelled"))); - } sinkHandler.fetchPageAsync(request.sourcesFinished(), listener); } } @@ -251,16 +231,16 @@ protected void runInternal() { * @param parentTask the parent task that initialized the ESQL request * @param exchangeId the exchange ID * @param transportService the transport service - * @param remoteNode the node where the remote exchange sink is located + * @param conn the connection to the remote node where the remote exchange sink is located */ - public RemoteSink newRemoteSink(Task parentTask, String exchangeId, TransportService transportService, DiscoveryNode remoteNode) { - return new TransportRemoteSink(transportService, blockFactory, remoteNode, parentTask, exchangeId, executor); + public RemoteSink newRemoteSink(Task parentTask, String exchangeId, TransportService transportService, Transport.Connection conn) { + return new TransportRemoteSink(transportService, blockFactory, conn, parentTask, exchangeId, executor); } record TransportRemoteSink( TransportService transportService, BlockFactory blockFactory, - DiscoveryNode node, + Transport.Connection connection, Task parentTask, String exchangeId, Executor responseExecutor @@ -269,7 +249,7 @@ record TransportRemoteSink( @Override public void fetchPageAsync(boolean allSourcesFinished, ActionListener listener) { transportService.sendChildRequest( - node, + connection, EXCHANGE_ACTION_NAME, new ExchangeRequest(exchangeId, allSourcesFinished), parentTask, @@ -285,7 +265,7 @@ public void fetchPageAsync(boolean allSourcesFinished, ActionListener input = CannedSourceOperator.collectPages(simpleInput(blockFactory, end)); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); try ( Driver d = new Driver( @@ -120,7 +121,7 @@ public final void testMultivalued() { List 
input = CannedSourceOperator.collectPages( new PositionMergingSourceOperator(simpleInput(driverContext.blockFactory(), end), blockFactory) ); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); assertSimpleOutput(origInput, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator(), driverContext)); } @@ -134,7 +135,7 @@ public final void testMultivaluedWithNulls() { blockFactory ) ); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); assertSimpleOutput(origInput, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator(), driverContext)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 6afd285987696..730a8d1ee66a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.ForkingOperatorTestCase; @@ -146,8 +147,8 @@ protected final void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - return ByteSizeValue.ofBytes(between(1, 32)); + protected ByteSizeValue memoryLimitForSimple() { + return ByteSizeValue.ofBytes(100); } public final void testNullGroupsAndValues() { @@ -157,7 +158,7 @@ public final void testNullGroupsAndValues() { List input = CannedSourceOperator.collectPages( new NullInsertingSourceOperator(simpleInput(driverContext.blockFactory(), end), blockFactory) ); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -171,7 +172,7 @@ public final void testNullGroups() { BlockFactory blockFactory = driverContext.blockFactory(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullGroups(simpleInput(blockFactory, end), blockFactory)); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -189,7 +190,7 @@ public void testAllKeyNulls() { input.add(p); } else { Block[] blocks = new Block[p.getBlockCount()]; - blocks[0] = Block.constantNullBlock(p.getPositionCount(), blockFactory); + blocks[0] = blockFactory.newConstantNullBlock(p.getPositionCount()); for (int i = 1; i < blocks.length; i++) { blocks[i] = 
p.getBlock(i); } @@ -197,7 +198,7 @@ public void testAllKeyNulls() { input.add(new Page(blocks)); } } - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -232,7 +233,7 @@ public final void testNullValues() { BlockFactory blockFactory = driverContext.blockFactory(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end), blockFactory)); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -246,7 +247,7 @@ public final void testNullValuesInitialIntermediateFinal() { BlockFactory blockFactory = driverContext.blockFactory(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end), blockFactory)); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -278,7 +279,7 @@ public final void testMultivalued() { List input = CannedSourceOperator.collectPages( mergeValues(simpleInput(driverContext.blockFactory(), end), driverContext.blockFactory()) ); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -294,7 +295,7 @@ public final void testMulitvaluedNullGroupsAndValues() { List input = CannedSourceOperator.collectPages( new NullInsertingSourceOperator(mergeValues(simpleInput(driverContext.blockFactory(), end), blockFactory), blockFactory) ); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -309,7 +310,7 @@ public void testMulitvaluedNullGroup() { int end = between(1, 2); // TODO revert var inputOperator = nullGroups(mergeValues(simpleInput(driverContext.blockFactory(), end), blockFactory), blockFactory); List input = CannedSourceOperator.collectPages(inputOperator); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -325,7 +326,7 @@ public final void testMulitvaluedNullValues() { List input = CannedSourceOperator.collectPages( nullValues(mergeValues(simpleInput(blockFactory, end), blockFactory), blockFactory) ); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = 
BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive( simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator(), @@ -366,18 +367,20 @@ public final void testNullOnlyInputInitialIntermediateFinal() { * Run the aggregation passing only null values. */ private void assertNullOnly(List operators, DriverContext driverContext) { - LongBlock.Builder groupBuilder = LongBlock.newBlockBuilder(1); - if (randomBoolean()) { - groupBuilder.appendLong(1); - } else { - groupBuilder.appendNull(); - } - List source = List.of(new Page(groupBuilder.build(), Block.constantNullBlock(1))); - List results = drive(operators, source.iterator(), driverContext); + BlockFactory blockFactory = driverContext.blockFactory(); + try (var groupBuilder = blockFactory.newLongBlockBuilder(1)) { + if (randomBoolean()) { + groupBuilder.appendLong(1); + } else { + groupBuilder.appendNull(); + } + List source = List.of(new Page(groupBuilder.build(), blockFactory.newConstantNullBlock(1))); + List results = drive(operators, source.iterator(), driverContext); - assertThat(results, hasSize(1)); - Block resultBlock = results.get(0).getBlock(1); - assertOutputFromNullOnly(resultBlock, 0); + assertThat(results, hasSize(1)); + Block resultBlock = results.get(0).getBlock(1); + assertOutputFromNullOnly(resultBlock, 0); + } } public final void testNullSome() { @@ -465,7 +468,7 @@ protected Block merge(int blockIndex, Block block) { if (blockIndex != 0) { return super.merge(blockIndex, block); } - Block.Builder builder = block.elementType().newBlockBuilder(block.getPositionCount() / 2); + Block.Builder builder = block.elementType().newBlockBuilder(block.getPositionCount() / 2, blockFactory); for (int p = 0; p + 1 < block.getPositionCount(); p += 2) { builder.copyFrom(block, p, p + 1); } @@ -565,7 +568,7 @@ public AddInput prepareProcessPage(SeenGroupIds ignoredSeenGroupIds, Page page) @Override public void add(int positionOffset, IntBlock groupIds) { for (int offset = 0; offset < groupIds.getPositionCount(); offset += emitChunkSize) { - IntBlock.Builder builder = IntBlock.newBlockBuilder(emitChunkSize); + IntBlock.Builder builder = blockFactory().newIntBlockBuilder(emitChunkSize); int endP = Math.min(groupIds.getPositionCount(), offset + emitChunkSize); for (int p = offset; p < endP; p++) { int start = groupIds.getFirstValueIndex(p); @@ -603,7 +606,7 @@ public void add(int positionOffset, IntVector groupIds) { seenGroupIds.set(group); chunk[count++] = group; } - BlockFactory blockFactory = BlockFactory.getNonBreakingInstance(); // TODO: just for compile + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); // TODO: just for compile delegateAddInput.add(positionOffset + offset, blockFactory.newIntArrayVector(chunk, count)); } } @@ -618,7 +621,7 @@ public void addIntermediateInput(int positionOffset, IntVector groupIds, Page pa for (int i = offset; i < Math.min(groupIds.getPositionCount(), offset + emitChunkSize); i++) { chunk[count++] = groupIds.getInt(i); } - BlockFactory blockFactory = BlockFactory.getNonBreakingInstance(); // TODO: just for compile + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); // TODO: just for compile delegate.addIntermediateInput(positionOffset + offset, blockFactory.newIntArrayVector(chunk, count), page); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 567f58d0dee75..0ccf2d3af04d9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -27,6 +26,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.core.Releasables; @@ -498,12 +498,12 @@ public void testBooleanHashTrueOnly() { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); assertKeys(ordsAndKeys.keys, true); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); + assertThat(ordsAndKeys.nonEmpty, equalTo(TestBlockFactory.getNonBreakingInstance().newConstantIntVector(0, 1))); } else { assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=false, seenTrue=true, seenNull=false}")); assertOrds(ordsAndKeys.ords, 2, 2, 2, 2); assertKeys(ordsAndKeys.keys, true); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(2).build())); + assertThat(ordsAndKeys.nonEmpty, equalTo(TestBlockFactory.getNonBreakingInstance().newConstantIntVector(2, 1))); } }, blockFactory.newBooleanArrayVector(values, values.length).asBlock()); } @@ -514,11 +514,11 @@ public void testBooleanHashFalseOnly() { if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); + assertThat(ordsAndKeys.nonEmpty, equalTo(TestBlockFactory.getNonBreakingInstance().newConstantIntVector(0, 1))); } else { assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=false, seenNull=false}")); assertOrds(ordsAndKeys.ords, 1, 1, 1, 1); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(1).build())); + assertThat(ordsAndKeys.nonEmpty, equalTo(TestBlockFactory.getNonBreakingInstance().newConstantIntVector(1, 1))); } assertKeys(ordsAndKeys.keys, false); }, blockFactory.newBooleanArrayVector(values, values.length).asBlock()); @@ -1262,6 +1262,6 @@ static CircuitBreakerService mockBreakerService(CircuitBreaker breaker) { } IntVector intRange(int startInclusive, int endExclusive) { - return IntVector.range(startInclusive, endExclusive, BlockFactory.getNonBreakingInstance()); + return IntVector.range(startInclusive, endExclusive, TestBlockFactory.getNonBreakingInstance()); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 11b4fb161831f..7372c1f684d6f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -12,9 +12,15 @@ import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -59,52 +65,58 @@ public void testEmpty() { void testEmpty(BlockFactory bf) { assertZeroPositionsAndRelease(bf.newIntArrayBlock(new int[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); - assertZeroPositionsAndRelease(IntBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newIntBlockBuilder(0).build()); assertZeroPositionsAndRelease(bf.newIntArrayVector(new int[] {}, 0)); - assertZeroPositionsAndRelease(IntVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newIntVectorBuilder(0).build()); assertZeroPositionsAndRelease(bf.newLongArrayBlock(new long[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); - assertZeroPositionsAndRelease(LongBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newLongBlockBuilder(0).build()); assertZeroPositionsAndRelease(bf.newLongArrayVector(new long[] {}, 0)); - assertZeroPositionsAndRelease(LongVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newLongVectorBuilder(0).build()); assertZeroPositionsAndRelease(bf.newDoubleArrayBlock(new double[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); - assertZeroPositionsAndRelease(DoubleBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newDoubleBlockBuilder(0).build()); assertZeroPositionsAndRelease(bf.newDoubleArrayVector(new double[] {}, 0)); - assertZeroPositionsAndRelease(DoubleVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newDoubleVectorBuilder(0).build()); assertZeroPositionsAndRelease( bf.newBytesRefArrayBlock(new BytesRefArray(0, bf.bigArrays()), 0, new int[] {}, new BitSet(), randomOrdering()) ); - assertZeroPositionsAndRelease(BytesRefBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newBytesRefBlockBuilder(0).build()); assertZeroPositionsAndRelease(bf.newBytesRefArrayVector(new BytesRefArray(0, bf.bigArrays()), 0)); - assertZeroPositionsAndRelease(BytesRefVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newBytesRefVectorBuilder(0).build()); assertZeroPositionsAndRelease(bf.newBooleanArrayBlock(new boolean[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); - assertZeroPositionsAndRelease(BooleanBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newBooleanBlockBuilder(0).build()); assertZeroPositionsAndRelease(bf.newBooleanArrayVector(new boolean[] {}, 0)); - 
assertZeroPositionsAndRelease(BooleanVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newBooleanVectorBuilder(0).build()); } public void testSmallSingleValueDenseGrowthInt() { for (int initialSize : List.of(0, 1, 2, 3, 4, 5)) { - try (var blockBuilder = IntBlock.newBlockBuilder(initialSize)) { + try (var blockBuilder = blockFactory.newIntBlockBuilder(initialSize)) { IntStream.range(0, 10).forEach(blockBuilder::appendInt); - assertSingleValueDenseBlock(blockBuilder.build()); + IntBlock block = blockBuilder.build(); + assertSingleValueDenseBlock(block); + block.close(); } } } public void testSmallSingleValueDenseGrowthLong() { for (int initialSize : List.of(0, 1, 2, 3, 4, 5)) { - try (var blockBuilder = LongBlock.newBlockBuilder(initialSize)) { + try (var blockBuilder = blockFactory.newLongBlockBuilder(initialSize)) { IntStream.range(0, 10).forEach(blockBuilder::appendLong); - assertSingleValueDenseBlock(blockBuilder.build()); + LongBlock block = blockBuilder.build(); + assertSingleValueDenseBlock(block); + block.close(); } } } public void testSmallSingleValueDenseGrowthDouble() { for (int initialSize : List.of(0, 1, 2, 3, 4, 5)) { - try (var blockBuilder = DoubleBlock.newBlockBuilder(initialSize)) { + try (var blockBuilder = blockFactory.newDoubleBlockBuilder(initialSize)) { IntStream.range(0, 10).forEach(blockBuilder::appendDouble); - assertSingleValueDenseBlock(blockBuilder.build()); + DoubleBlock block = blockBuilder.build(); + assertSingleValueDenseBlock(block); + block.close(); } } } @@ -112,18 +124,22 @@ public void testSmallSingleValueDenseGrowthDouble() { public void testSmallSingleValueDenseGrowthBytesRef() { final BytesRef NULL_VALUE = new BytesRef(); for (int initialSize : List.of(0, 1, 2, 3, 4, 5)) { - try (var blockBuilder = BytesRefBlock.newBlockBuilder(initialSize)) { + try (var blockBuilder = blockFactory.newBytesRefBlockBuilder(initialSize)) { IntStream.range(0, 10).mapToObj(i -> NULL_VALUE).forEach(blockBuilder::appendBytesRef); - assertSingleValueDenseBlock(blockBuilder.build()); + BytesRefBlock block = blockBuilder.build(); + assertSingleValueDenseBlock(block); + block.close(); } } } public void testSmallSingleValueDenseGrowthBoolean() { for (int initialSize : List.of(0, 1, 2, 3, 4, 5)) { - try (var blockBuilder = BooleanBlock.newBlockBuilder(initialSize)) { + try (var blockBuilder = blockFactory.newBooleanBlockBuilder(initialSize)) { IntStream.range(0, 10).forEach(i -> blockBuilder.appendBoolean(i % 3 == 0)); - assertSingleValueDenseBlock(blockBuilder.build()); + BooleanBlock block = blockBuilder.build(); + assertSingleValueDenseBlock(block); + block.close(); } } } @@ -160,7 +176,7 @@ public void testIntBlock() { IntBlock block; if (randomBoolean()) { final int builderEstimateSize = randomBoolean() ? 
randomIntBetween(1, positionCount) : positionCount; - try (IntBlock.Builder blockBuilder = IntBlock.newBlockBuilder(builderEstimateSize, blockFactory)) { + try (IntBlock.Builder blockBuilder = blockFactory.newIntBlockBuilder(builderEstimateSize)) { IntStream.range(0, positionCount).forEach(blockBuilder::appendInt); block = blockBuilder.build(); } @@ -175,7 +191,7 @@ public void testIntBlock() { assertThat(pos, is(block.getInt(pos))); assertSingleValueDenseBlock(block); - try (IntBlock.Builder blockBuilder = IntBlock.newBlockBuilder(1, blockFactory)) { + try (IntBlock.Builder blockBuilder = blockFactory.newIntBlockBuilder(1)) { IntBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); releaseAndAssertBreaker(block, copy); @@ -184,20 +200,19 @@ public void testIntBlock() { if (positionCount > 1) { assertNullValues( positionCount, - size -> IntBlock.newBlockBuilder(size, blockFactory), - (bb, value) -> bb.appendInt(value), + blockFactory::newIntBlockBuilder, + IntBlock.Builder::appendInt, position -> position, IntBlock.Builder::build, (randomNonNullPosition, b) -> { - assertThat((int) randomNonNullPosition, is(b.getInt(randomNonNullPosition.intValue()))); + assertThat(randomNonNullPosition, is(b.getInt(randomNonNullPosition.intValue()))); } ); } try ( - IntVector.Builder vectorBuilder = IntVector.newVectorBuilder( - randomBoolean() ? randomIntBetween(1, positionCount) : positionCount, - blockFactory + IntVector.Builder vectorBuilder = blockFactory.newIntVectorBuilder( + randomBoolean() ? randomIntBetween(1, positionCount) : positionCount ) ) { IntStream.range(0, positionCount).forEach(vectorBuilder::appendInt); @@ -213,12 +228,7 @@ public void testConstantIntBlock() { assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); int value = randomInt(); - IntBlock block; - if (randomBoolean()) { - block = IntBlock.newConstantBlockWith(value, positionCount, blockFactory); - } else { - block = blockFactory.newConstantIntBlockWith(value, positionCount); - } + IntBlock block = blockFactory.newConstantIntBlockWith(value, positionCount); assertThat(positionCount, is(block.getPositionCount())); assertThat(value, is(block.getInt(0))); assertThat(value, is(block.getInt(positionCount - 1))); @@ -259,8 +269,8 @@ public void testLongBlock() { if (positionCount > 1) { assertNullValues( positionCount, - size -> LongBlock.newBlockBuilder(size, blockFactory), - (bb, value) -> bb.appendLong(value), + blockFactory::newLongBlockBuilder, + LongBlock.Builder::appendLong, position -> (long) position, LongBlock.Builder::build, (randomNonNullPosition, b) -> { @@ -284,12 +294,7 @@ public void testConstantLongBlock() { assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); long value = randomLong(); - LongBlock block; - if (randomBoolean()) { - block = LongBlock.newConstantBlockWith(value, positionCount, blockFactory); - } else { - block = blockFactory.newConstantLongBlockWith(value, positionCount); - } + LongBlock block = blockFactory.newConstantLongBlockWith(value, positionCount); assertThat(positionCount, is(block.getPositionCount())); assertThat(value, is(block.getLong(0))); assertThat(value, is(block.getLong(positionCount - 1))); @@ -322,7 +327,7 @@ public void testDoubleBlock() { assertThat((double) pos, is(block.getDouble(pos))); assertSingleValueDenseBlock(block); - try (DoubleBlock.Builder blockBuilder = DoubleBlock.newBlockBuilder(1)) { + try (DoubleBlock.Builder blockBuilder = 
blockFactory.newDoubleBlockBuilder(1)) { DoubleBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); releaseAndAssertBreaker(block, copy); @@ -331,8 +336,8 @@ public void testDoubleBlock() { if (positionCount > 1) { assertNullValues( positionCount, - size -> DoubleBlock.newBlockBuilder(size, blockFactory), - (bb, value) -> bb.appendDouble(value), + blockFactory::newDoubleBlockBuilder, + DoubleBlock.Builder::appendDouble, position -> (double) position, DoubleBlock.Builder::build, (randomNonNullPosition, b) -> { @@ -358,12 +363,7 @@ public void testConstantDoubleBlock() { for (int i = 0; i < 1000; i++) { int positionCount = randomIntBetween(1, 16 * 1024); double value = randomDouble(); - DoubleBlock block; - if (randomBoolean()) { - block = DoubleBlock.newConstantBlockWith(value, positionCount, blockFactory); - } else { - block = blockFactory.newConstantDoubleBlockWith(value, positionCount); - } + DoubleBlock block = blockFactory.newConstantDoubleBlockWith(value, positionCount); assertThat(positionCount, is(block.getPositionCount())); assertThat(value, is(block.getDouble(0))); assertThat(value, is(block.getDouble(positionCount - 1))); @@ -408,7 +408,7 @@ private void testBytesRefBlock(Supplier byteArraySupplier, boolean cho } assertSingleValueDenseBlock(block); - try (BytesRefBlock.Builder blockBuilder = BytesRefBlock.newBlockBuilder(1)) { + try (BytesRefBlock.Builder blockBuilder = blockFactory.newBytesRefBlockBuilder(1)) { BytesRefBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); releaseAndAssertBreaker(block, copy); @@ -417,8 +417,8 @@ private void testBytesRefBlock(Supplier byteArraySupplier, boolean cho if (positionCount > 1) { assertNullValues( positionCount, - size -> BytesRefBlock.newBlockBuilder(size, blockFactory), - (bb, value) -> bb.appendBytesRef(value), + blockFactory::newBytesRefBlockBuilder, + BytesRefBlock.Builder::appendBytesRef, position -> values[position], BytesRefBlock.Builder::build, (randomNonNullPosition, b) -> assertThat( @@ -498,12 +498,7 @@ public void testConstantBytesRefBlock() { for (int i = 0; i < 1000; i++) { int positionCount = randomIntBetween(1, 16 * 1024); BytesRef value = new BytesRef(randomByteArrayOfLength(between(1, 20))); - BytesRefBlock block; - if (randomBoolean()) { - block = BytesRefBlock.newConstantBlockWith(value, positionCount, blockFactory); - } else { - block = blockFactory.newConstantBytesRefBlockWith(value, positionCount); - } + BytesRefBlock block = blockFactory.newConstantBytesRefBlockWith(value, positionCount); assertThat(block.getPositionCount(), is(positionCount)); BytesRef bytes = new BytesRef(); @@ -541,7 +536,7 @@ public void testBooleanBlock() { assertThat(block.getBoolean(positionCount - 1), is((positionCount - 1) % 10 == 0)); assertSingleValueDenseBlock(block); - try (BooleanBlock.Builder blockBuilder = BooleanBlock.newBlockBuilder(1)) { + try (BooleanBlock.Builder blockBuilder = blockFactory.newBooleanBlockBuilder(1)) { BooleanBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); releaseAndAssertBreaker(block, copy); @@ -550,7 +545,7 @@ public void testBooleanBlock() { if (positionCount > 1) { assertNullValues( positionCount, - size -> BooleanBlock.newBlockBuilder(size, blockFactory), + size -> blockFactory.newBooleanBlockBuilder(size), (bb, value) -> bb.appendBoolean(value), position -> position % 10 == 0, BooleanBlock.Builder::build, @@ -574,12 
+569,7 @@ public void testConstantBooleanBlock() { for (int i = 0; i < 1000; i++) { int positionCount = randomIntBetween(1, 16 * 1024); boolean value = randomBoolean(); - BooleanBlock block; - if (randomBoolean()) { - block = BooleanBlock.newConstantBlockWith(value, positionCount, blockFactory); - } else { - block = blockFactory.newConstantBooleanBlockWith(value, positionCount); - } + BooleanBlock block = blockFactory.newConstantBooleanBlockWith(value, positionCount); assertThat(positionCount, is(block.getPositionCount())); assertThat(block.getBoolean(0), is(value)); assertThat(block.getBoolean(positionCount - 1), is(value)); @@ -593,7 +583,7 @@ public void testConstantNullBlock() { for (int i = 0; i < 100; i++) { assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); - Block block = Block.constantNullBlock(positionCount, blockFactory); + Block block = blockFactory.newConstantNullBlock(positionCount); assertTrue(block.areAllValuesNull()); assertThat(block, instanceOf(BooleanBlock.class)); assertThat(block, instanceOf(IntBlock.class)); @@ -616,7 +606,7 @@ public void testConstantNullBlock() { public void testSingleValueSparseInt() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = IntBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newIntBlockBuilder(builderEstimateSize)) { int actualValueCount = 0; int[] values = new int[positionCount]; @@ -644,13 +634,14 @@ public void testSingleValueSparseInt() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); + block.close(); } } public void testSingleValueSparseLong() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = LongBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newLongBlockBuilder(builderEstimateSize)) { int actualValueCount = 0; long[] values = new long[positionCount]; @@ -677,13 +668,14 @@ public void testSingleValueSparseLong() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); + block.close(); } } public void testSingleValueSparseDouble() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = DoubleBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newDoubleBlockBuilder(builderEstimateSize)) { int actualValueCount = 0; double[] values = new double[positionCount]; @@ -710,13 +702,14 @@ public void testSingleValueSparseDouble() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); + block.close(); } } public void testSingleValueSparseBoolean() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? 
randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = BooleanBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newBooleanBlockBuilder(builderEstimateSize)) { boolean[] values = new boolean[positionCount]; int actualValueCount = 0; @@ -743,6 +736,7 @@ public void testSingleValueSparseBoolean() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); + block.close(); } } @@ -750,8 +744,8 @@ public void testToStringSmall() { final int estimatedSize = randomIntBetween(1024, 4096); try ( - var boolBlock = BooleanBlock.newBlockBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build(); - var boolVector = BooleanVector.newVectorBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build() + var boolBlock = blockFactory.newBooleanBlockBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build(); + var boolVector = blockFactory.newBooleanVectorBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build() ) { for (Object obj : List.of(boolVector, boolBlock, boolBlock.asVector())) { String s = obj.toString(); @@ -761,8 +755,8 @@ public void testToStringSmall() { } try ( - var intBlock = IntBlock.newBlockBuilder(estimatedSize).appendInt(1).appendInt(2).build(); - var intVector = IntVector.newVectorBuilder(estimatedSize).appendInt(1).appendInt(2).build() + var intBlock = blockFactory.newIntBlockBuilder(estimatedSize).appendInt(1).appendInt(2).build(); + var intVector = blockFactory.newIntVectorBuilder(estimatedSize).appendInt(1).appendInt(2).build() ) { for (Object obj : List.of(intVector, intBlock, intBlock.asVector())) { String s = obj.toString(); @@ -770,25 +764,38 @@ public void testToStringSmall() { assertThat(s, containsString("positions=2")); } for (IntBlock block : List.of(intBlock, intVector.asBlock())) { - assertThat(block.filter(0).toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]")); - assertThat(block.filter(1).toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=2]]")); - assertThat( - block.filter(0, 1).toString(), - containsString("IntVectorBlock[vector=IntArrayVector[positions=2, values=[1, 2]]]") - ); - assertThat(block.filter().toString(), containsString("IntVectorBlock[vector=IntArrayVector[positions=0, values=[]]]")); + try (var filter = block.filter(0)) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]")); + } + try (var filter = block.filter(1)) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=2]]")); + } + try (var filter = block.filter(0, 1)) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=IntArrayVector[positions=2, values=[1, 2]]]")); + } + try (var filter = block.filter()) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=IntArrayVector[positions=0, values=[]]]")); + } } for (IntVector vector : List.of(intVector, intBlock.asVector())) { - assertThat(vector.filter(0).toString(), containsString("ConstantIntVector[positions=1, value=1]")); - assertThat(vector.filter(1).toString(), containsString("ConstantIntVector[positions=1, value=2]")); - assertThat(vector.filter(0, 1).toString(), containsString("IntArrayVector[positions=2, values=[1, 2]]")); - assertThat(vector.filter().toString(), containsString("IntArrayVector[positions=0, values=[]]")); + try (var 
filter = vector.filter(0)) { + assertThat(filter.toString(), containsString("ConstantIntVector[positions=1, value=1]")); + } + try (IntVector filter = vector.filter(1)) { + assertThat(filter.toString(), containsString("ConstantIntVector[positions=1, value=2]")); + } + try (IntVector filter = vector.filter(0, 1)) { + assertThat(filter.toString(), containsString("IntArrayVector[positions=2, values=[1, 2]]")); + } + try (IntVector filter = vector.filter()) { + assertThat(filter.toString(), containsString("IntArrayVector[positions=0, values=[]]")); + } } } try ( - var longBlock = LongBlock.newBlockBuilder(estimatedSize).appendLong(10L).appendLong(20L).build(); - var longVector = LongVector.newVectorBuilder(estimatedSize).appendLong(10L).appendLong(20L).build() + var longBlock = blockFactory.newLongBlockBuilder(estimatedSize).appendLong(10L).appendLong(20L).build(); + var longVector = blockFactory.newLongVectorBuilder(estimatedSize).appendLong(10L).appendLong(20L).build() ) { for (Object obj : List.of(longVector, longBlock, longBlock.asVector())) { String s = obj.toString(); @@ -798,8 +805,8 @@ public void testToStringSmall() { } try ( - var doubleBlock = DoubleBlock.newBlockBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build(); - var doubleVector = DoubleVector.newVectorBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build() + var doubleBlock = blockFactory.newDoubleBlockBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build(); + var doubleVector = blockFactory.newDoubleVectorBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build() ) { for (Object obj : List.of(doubleVector, doubleBlock, doubleBlock.asVector())) { String s = obj.toString(); @@ -810,8 +817,8 @@ public void testToStringSmall() { assert new BytesRef("1a").toString().equals("[31 61]") && new BytesRef("2b").toString().equals("[32 62]"); try ( - var blockBuilder = BytesRefBlock.newBlockBuilder(estimatedSize); - var vectorBuilder = BytesRefVector.newVectorBuilder(estimatedSize) + var blockBuilder = blockFactory.newBytesRefBlockBuilder(estimatedSize); + var vectorBuilder = blockFactory.newBytesRefVectorBuilder(estimatedSize) ) { var bytesRefBlock = blockBuilder.appendBytesRef(new BytesRef("1a")).appendBytesRef(new BytesRef("2b")).build(); var bytesRefVector = vectorBuilder.appendBytesRef(new BytesRef("1a")).appendBytesRef(new BytesRef("2b")).build(); @@ -819,6 +826,7 @@ public void testToStringSmall() { String s = obj.toString(); assertThat(s, containsString("positions=2")); } + Releasables.close(bytesRefBlock, bytesRefVector); } } @@ -863,7 +871,7 @@ public static RandomBlock randomBlock( int maxDupsPerPosition ) { return randomBlock( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), elementType, positionCount, nullAllowed, @@ -1019,7 +1027,7 @@ void releaseAndAssertBreaker(Vector vector) { static void assertCannotDoubleRelease(Block block) { var ex = expectThrows(IllegalStateException.class, () -> block.close()); - assertThat(ex.getMessage(), containsString("can't release already released block")); + assertThat(ex.getMessage(), containsString("can't release already released object")); } static void assertCannotReadFromPage(Page page) { @@ -1054,6 +1062,13 @@ public void testRefCountingArrayBlock() { assertThat(breaker.getUsed(), is(0L)); } + public void testRefCountingBigArrayBlock() { + Block block = randomBigArrayBlock(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(block); + assertThat(breaker.getUsed(), is(0L)); + } + 
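// A minimal sketch (illustrative only, not part of this change) of the ref-counting
// contract that the testRefCounting* cases in this file pin down; it assumes the
// breaker-backed blockFactory that the surrounding tests already use.
Block block = blockFactory.newConstantNullBlock(10);      // breaker.getUsed() is now > 0
block.incRef();                                           // a second reference
assertTrue(block.tryIncRef());                            // a third; tryIncRef succeeds while the block is live
assertFalse(block.decRef());                              // down to two; false means "not yet released"
block.close();                                            // close() behaves exactly like decRef()
assertTrue(block.hasReferences());                        // one reference still holds the memory
block.close();                                            // last reference released; breaker.getUsed() returns to 0
assertFalse(block.hasReferences());
expectThrows(IllegalStateException.class, block::close);  // double release is rejected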
public void testRefCountingConstantNullBlock() { Block block = blockFactory.newConstantNullBlock(10); assertThat(breaker.getUsed(), greaterThan(0L)); @@ -1070,52 +1085,165 @@ public void testRefCountingDocBlock() { } public void testRefCountingVectorBlock() { - Block block = randomNonDocVector().asBlock(); + Block block = randomConstantVector().asBlock(); assertThat(breaker.getUsed(), greaterThan(0L)); assertRefCountingBehavior(block); assertThat(breaker.getUsed(), is(0L)); } - // Take a block with exactly 1 reference and assert that ref counting works fine. - static void assertRefCountingBehavior(Block b) { - assertTrue(b.hasReferences()); + public void testRefCountingArrayVector() { + Vector vector = randomArrayVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + public void testRefCountingBigArrayVector() { + Vector vector = randomBigArrayVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + public void testRefCountingConstantVector() { + Vector vector = randomConstantVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + public void testRefCountingDocVector() { + int positionCount = randomIntBetween(0, 100); + DocVector vector = new DocVector(intVector(positionCount), intVector(positionCount), intVector(positionCount), true); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + /** + * Take an object with exactly 1 reference and assert that ref counting works fine. + * Assumes that {@link Releasable#close()} and {@link RefCounted#decRef()} are equivalent. 
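+ * In practice that means an object may be released through any interleaving of {@code close()} and {@code decRef()} calls, which is why the method below picks one of the two forms at random on each iteration.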
+ */ + static <T extends RefCounted & Releasable> void assertRefCountingBehavior(T object) { + assertTrue(object.hasReferences()); int numShallowCopies = randomIntBetween(0, 15); for (int i = 0; i < numShallowCopies; i++) { if (randomBoolean()) { - b.incRef(); + object.incRef(); } else { - assertTrue(b.tryIncRef()); + assertTrue(object.tryIncRef()); } } for (int i = 0; i < numShallowCopies; i++) { if (randomBoolean()) { - b.close(); + object.close(); } else { // closing and decRef'ing must be equivalent - assertFalse(b.decRef()); + assertFalse(object.decRef()); } - assertTrue(b.hasReferences()); + assertTrue(object.hasReferences()); } if (randomBoolean()) { - b.close(); + object.close(); } else { - assertTrue(b.decRef()); + assertTrue(object.decRef()); } - assertFalse(b.hasReferences()); - assertFalse(b.tryIncRef()); + assertFalse(object.hasReferences()); + assertFalse(object.tryIncRef()); - expectThrows(IllegalStateException.class, b::close); - expectThrows(IllegalStateException.class, b::incRef); + expectThrows(IllegalStateException.class, object::close); + expectThrows(IllegalStateException.class, object::incRef); } private IntVector intVector(int positionCount) { return blockFactory.newIntArrayVector(IntStream.range(0, positionCount).toArray(), positionCount); } - private Vector randomNonDocVector() { + private Vector randomArrayVector() { + int positionCount = randomIntBetween(0, 100); + int vectorType = randomIntBetween(0, 4); + + return switch (vectorType) { + case 0 -> { + boolean[] values = new boolean[positionCount]; + Arrays.fill(values, randomBoolean()); + yield blockFactory.newBooleanArrayVector(values, positionCount); + } + case 1 -> { + BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + for (int i = 0; i < positionCount; i++) { + values.append(new BytesRef(randomByteArrayOfLength(between(1, 20)))); + } + + yield blockFactory.newBytesRefArrayVector(values, positionCount); + } + case 2 -> { + double[] values = new double[positionCount]; + Arrays.fill(values, 1.0); + + yield blockFactory.newDoubleArrayVector(values, positionCount); + } + case 3 -> { + int[] values = new int[positionCount]; + Arrays.fill(values, 1); + + yield blockFactory.newIntArrayVector(values, positionCount); + } + default -> { + long[] values = new long[positionCount]; + Arrays.fill(values, 1L); + + yield blockFactory.newLongArrayVector(values, positionCount); + } + }; + } + + private Vector randomBigArrayVector() { + int positionCount = randomIntBetween(0, 10000); + int arrayType = randomIntBetween(0, 3); + + return switch (arrayType) { + case 0 -> { + BitArray values = new BitArray(positionCount, blockFactory.bigArrays()); + for (int i = 0; i < positionCount; i++) { + if (randomBoolean()) { + values.set(i); + } + } + + yield new BooleanBigArrayVector(values, positionCount, blockFactory); + } + case 1 -> { + DoubleArray values = blockFactory.bigArrays().newDoubleArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomDouble()); + } + + yield new DoubleBigArrayVector(values, positionCount, blockFactory); + } + case 2 -> { + IntArray values = blockFactory.bigArrays().newIntArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomInt()); + } + + yield new IntBigArrayVector(values, positionCount, blockFactory); + } + default -> { + LongArray values = blockFactory.bigArrays().newLongArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomLong()); + } + + yield new LongBigArrayVector(values, positionCount, blockFactory); + } + }; + } + + private Vector randomConstantVector() { int positionCount = randomIntBetween(0, 100); int vectorType = randomIntBetween(0, 4); @@ -1135,7 +1263,7 @@ private Block randomArrayBlock() { return switch (arrayType) { case 0 -> { boolean[] values = new boolean[positionCount]; - Arrays.fill(values, true); + Arrays.fill(values, randomBoolean()); yield blockFactory.newBooleanArrayBlock(values, positionCount, new int[] {}, new BitSet(), randomOrdering()); } @@ -1167,4 +1295,46 @@ private Block randomArrayBlock() { } }; } + + private Block randomBigArrayBlock() { + int positionCount = randomIntBetween(0, 10000); + int arrayType = randomIntBetween(0, 3); + + return switch (arrayType) { + case 0 -> { + BitArray values = new BitArray(positionCount, blockFactory.bigArrays()); + for (int i = 0; i < positionCount; i++) { + if (randomBoolean()) { + values.set(i); + } + } + + yield new BooleanBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + case 1 -> { + DoubleArray values = blockFactory.bigArrays().newDoubleArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomDouble()); + } + + yield new DoubleBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + case 2 -> { + IntArray values = blockFactory.bigArrays().newIntArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomInt()); + } + + yield new IntBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + default -> { + LongArray values = blockFactory.bigArrays().newLongArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomLong()); + } + + yield new LongBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + }; + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java index 9130698a2c12b..f76ff0708120b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java @@ -45,7 +45,7 @@ public void testEqualityAndHashCodeSmallInput() { EqualsHashCodeTestUtils.checkEqualsAndHashCode( in, page -> new Page(0), - page -> new Page(1, IntBlock.newConstantBlockWith(1, 1)), + page -> new Page(1, blockFactory.newConstantIntBlockWith(1, 1)), Page::releaseBlocks ); in.releaseBlocks(); @@ -71,8 +71,8 @@ public void testEqualityAndHashCodeSmallInput() { in = new Page(blockFactory.newIntArrayVector(new int[] { 1, 1, 1 }, 3).asBlock()); EqualsHashCodeTestUtils.checkEqualsAndHashCode( in, - page -> new Page(IntBlock.newConstantBlockWith(1, 3)), - page -> new Page(IntBlock.newConstantBlockWith(1, 2)), + page -> new Page(blockFactory.newConstantIntBlockWith(1, 3)), + page -> new Page(blockFactory.newConstantIntBlockWith(1, 2)), Page::releaseBlocks ); in.releaseBlocks(); @@ -124,7 +124,10 @@ public void testEqualityAndHashCode() throws IOException { int positions = randomInt(page.getPositionCount() - 1); for (int blockIndex = 0; blockIndex < blocks.length; blockIndex++) { Block block = page.getBlock(blockIndex); - blocks[blockIndex] = block.elementType().newBlockBuilder(positions).copyFrom(block, 0,
page.getPositionCount() - 1).build(); + blocks[blockIndex] = block.elementType() + .newBlockBuilder(positions, TestBlockFactory.getNonBreakingInstance()) + .copyFrom(block, 0, page.getPositionCount() - 1) + .build(); } return new Page(blocks); }; @@ -137,10 +140,10 @@ public void testEqualityAndHashCode() throws IOException { case 0 -> blockFactory.newIntArrayVector(randomInts(positions).toArray(), positions).asBlock(); case 1 -> blockFactory.newLongArrayVector(randomLongs(positions).toArray(), positions).asBlock(); case 2 -> blockFactory.newDoubleArrayVector(randomDoubles(positions).toArray(), positions).asBlock(); - case 3 -> IntBlock.newConstantBlockWith(randomInt(), positions); - case 4 -> LongBlock.newConstantBlockWith(randomLong(), positions); - case 5 -> DoubleBlock.newConstantBlockWith(randomDouble(), positions); - case 6 -> BytesRefBlock.newConstantBlockWith(new BytesRef(Integer.toHexString(randomInt())), positions); + case 3 -> blockFactory.newConstantIntBlockWith(randomInt(), positions); + case 4 -> blockFactory.newConstantLongBlockWith(randomLong(), positions); + case 5 -> blockFactory.newConstantDoubleBlockWith(randomDouble(), positions); + case 6 -> blockFactory.newConstantBytesRefBlockWith(new BytesRef(Integer.toHexString(randomInt())), positions); default -> throw new AssertionError(); }; } @@ -183,10 +186,10 @@ public void testPageSerializationSimple() throws IOException { blockFactory.newLongArrayVector(LongStream.range(10, 20).toArray(), 10).asBlock(), blockFactory.newDoubleArrayVector(LongStream.range(30, 40).mapToDouble(i -> i).toArray(), 10).asBlock(), blockFactory.newBytesRefArrayVector(bytesRefArrayOf("0a", "1b", "2c", "3d", "4e", "5f", "6g", "7h", "8i", "9j"), 10).asBlock(), - IntBlock.newConstantBlockWith(randomInt(), 10), - LongBlock.newConstantBlockWith(randomInt(), 10), - DoubleBlock.newConstantBlockWith(randomInt(), 10), - BytesRefBlock.newConstantBlockWith(new BytesRef(Integer.toHexString(randomInt())), 10), + blockFactory.newConstantIntBlockWith(randomInt(), 10), + blockFactory.newConstantLongBlockWith(randomLong(), 10), + blockFactory.newConstantDoubleBlockWith(randomDouble(), 10), + blockFactory.newConstantBytesRefBlockWith(new BytesRef(Integer.toHexString(randomInt())), 10), toFilter.filter(5, 6, 7, 8, 9, 10, 11, 12, 13, 14).asBlock() ); toFilter.close(); @@ -215,9 +218,9 @@ public void testSerializationListPages() throws IOException { new Page(blockFactory.newIntArrayVector(randomInts(positions).toArray(), positions).asBlock()), new Page( blockFactory.newLongArrayVector(randomLongs(positions).toArray(), positions).asBlock(), - DoubleBlock.newConstantBlockWith(randomInt(), positions) + blockFactory.newConstantDoubleBlockWith(randomInt(), positions) ), - new Page(BytesRefBlock.newConstantBlockWith(new BytesRef("Hello World"), positions)) + new Page(blockFactory.newConstantBytesRefBlockWith(new BytesRef("Hello World"), positions)) ); try { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPages, page -> { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java index a9f08eee02d70..9c1b02aa74107 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java @@ -7,17 +7,19 @@ package org.elasticsearch.compute.data; -import 
org.elasticsearch.test.ESTestCase; +import org.elasticsearch.compute.operator.ComputeTestCase; import java.util.ArrayList; import java.util.List; import static org.hamcrest.Matchers.equalTo; -public class BlockBuilderAppendBlockTests extends ESTestCase { +public class BlockBuilderAppendBlockTests extends ComputeTestCase { public void testBasic() { - IntBlock src = new IntBlockBuilder(10, BlockFactory.getNonBreakingInstance()).appendInt(1) + BlockFactory blockFactory = blockFactory(); + IntBlock src = blockFactory.newIntBlockBuilder(10) + .appendInt(1) .appendNull() .beginPositionEntry() .appendInt(4) @@ -32,40 +34,48 @@ public void testBasic() { .endPositionEntry() .build(); // copy position by position - { - IntBlock.Builder dst = IntBlock.newBlockBuilder(randomIntBetween(1, 20)); + try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { for (int i = 0; i < src.getPositionCount(); i++) { - dst.appendAllValuesToCurrentPosition(src.filter(i)); + try (IntBlock filter = src.filter(i)) { + dst.appendAllValuesToCurrentPosition(filter); + } + } + try (IntBlock block = dst.build()) { + assertThat(block, equalTo(src)); } - assertThat(dst.build(), equalTo(src)); } // copy all block - { - IntBlock.Builder dst = IntBlock.newBlockBuilder(randomIntBetween(1, 20)); - IntBlock block = dst.appendAllValuesToCurrentPosition(src).build(); - assertThat(block.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); + try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { + try (IntBlock block = dst.appendAllValuesToCurrentPosition(src).build()) { + assertThat(block.getPositionCount(), equalTo(1)); + assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); + } } - { - Block dst = randomlyDivideAndMerge(src); + try (Block dst = randomlyDivideAndMerge(src)) { assertThat(dst.getPositionCount(), equalTo(1)); assertThat(BlockUtils.toJavaObject(dst, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); } } public void testRandomNullBlock() { - IntBlock.Builder src = IntBlock.newBlockBuilder(10); - src.appendAllValuesToCurrentPosition(new ConstantNullBlock(between(1, 100))); + BlockFactory blockFactory = blockFactory(); + IntBlock.Builder src = blockFactory.newIntBlockBuilder(10); + try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { + src.appendAllValuesToCurrentPosition(nullBlock); + } src.appendInt(101); - src.appendAllValuesToCurrentPosition(new ConstantNullBlock(between(1, 100))); + try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { + src.appendAllValuesToCurrentPosition(nullBlock); + } IntBlock block = src.build(); assertThat(block.getPositionCount(), equalTo(3)); assertTrue(block.isNull(0)); assertThat(block.getInt(1), equalTo(101)); assertTrue(block.isNull(2)); - Block flatten = randomlyDivideAndMerge(block); - assertThat(flatten.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); + try (Block flatten = randomlyDivideAndMerge(block)) { + assertThat(flatten.getPositionCount(), equalTo(1)); + assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); + } } public void testRandom() { @@ -79,14 +89,17 @@ public void testRandom() { 0, between(0, 16) ).block(); - randomlyDivideAndMerge(block); + + block = randomlyDivideAndMerge(block); + block.close(); } private Block randomlyDivideAndMerge(Block block) { while (block.getPositionCount() > 1 || randomBoolean()) { int 
positionCount = block.getPositionCount();
            int offset = 0;
-            Block.Builder builder = block.elementType().newBlockBuilder(randomIntBetween(1, 100));
+            Block.Builder builder = block.elementType()
+                .newBlockBuilder(randomIntBetween(1, 100), TestBlockFactory.getNonBreakingInstance());
            List<Object> expected = new ArrayList<>();
            while (offset < positionCount) {
                int length = randomIntBetween(1, positionCount - offset);
@@ -98,7 +111,9 @@ private Block randomlyDivideAndMerge(Block block) {
                Block sub = block.filter(positions);
                expected.add(extractAndFlattenBlockValues(sub));
                builder.appendAllValuesToCurrentPosition(sub);
+                sub.close();
            }
+            block.close();
            block = builder.build();
            assertThat(block.getPositionCount(), equalTo(expected.size()));
            for (int i = 0; i < block.getPositionCount(); i++) {
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java
index 529c1afeaaf44..e3a9aba0d1b7f 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java
@@ -69,22 +69,24 @@ public void testEvensFiltered() {
    }
    public void testSmallAllNull() {
-        assertSmall(Block.constantNullBlock(10));
+        assertSmall(TestBlockFactory.getNonBreakingInstance().newConstantNullBlock(10));
    }
    public void testEvensAllNull() {
-        assertEvens(Block.constantNullBlock(10));
+        assertEvens(TestBlockFactory.getNonBreakingInstance().newConstantNullBlock(10));
    }
    private void assertSmall(Block block) {
        int smallSize = Math.min(block.getPositionCount(), 10);
-        Block.Builder builder = elementType.newBlockBuilder(smallSize);
+        BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
+        Block.Builder builder = elementType.newBlockBuilder(smallSize, blockFactory);
        builder.copyFrom(block, 0, smallSize);
        assertBlockValues(builder.build(), BasicBlockTests.valuesAtPositions(block, 0, smallSize));
    }
    private void assertEvens(Block block) {
-        Block.Builder builder = elementType.newBlockBuilder(block.getPositionCount() / 2);
+        BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
+        Block.Builder builder = elementType.newBlockBuilder(block.getPositionCount() / 2, blockFactory);
        List<List<Object>> expected = new ArrayList<>();
        for (int i = 0; i < block.getPositionCount(); i += 2) {
            builder.copyFrom(block, i, i + 1);
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java
index b096b1e6b1a07..b13aa040f307d 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java
@@ -30,122 +30,172 @@ public class BlockSerializationTests extends SerializationTestCase {
    public void testConstantIntBlock() throws IOException {
-        assertConstantBlockImpl(IntBlock.newConstantBlockWith(randomInt(), randomIntBetween(1, 8192)));
+        assertConstantBlockImpl(blockFactory.newConstantIntBlockWith(randomInt(), randomIntBetween(1, 8192)));
    }
    public void testConstantLongBlockLong() throws IOException {
-        assertConstantBlockImpl(LongBlock.newConstantBlockWith(randomLong(), randomIntBetween(1, 8192)));
+
assertConstantBlockImpl(blockFactory.newConstantLongBlockWith(randomLong(), randomIntBetween(1, 8192))); } public void testConstantDoubleBlock() throws IOException { - assertConstantBlockImpl(DoubleBlock.newConstantBlockWith(randomDouble(), randomIntBetween(1, 8192))); + assertConstantBlockImpl(blockFactory.newConstantDoubleBlockWith(randomDouble(), randomIntBetween(1, 8192))); } public void testConstantBytesRefBlock() throws IOException { - Block block = BytesRefBlock.newConstantBlockWith(new BytesRef(((Integer) randomInt()).toString()), randomIntBetween(1, 8192)); + Block block = blockFactory.newConstantBytesRefBlockWith( + new BytesRef(((Integer) randomInt()).toString()), + randomIntBetween(1, 8192) + ); assertConstantBlockImpl(block); } private void assertConstantBlockImpl(Block origBlock) throws IOException { assertThat(origBlock.asVector().isConstant(), is(true)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + try (origBlock; Block deserBlock = serializeDeserializeBlock(origBlock)) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); assertThat(deserBlock.asVector().isConstant(), is(true)); } } public void testEmptyIntBlock() throws IOException { - assertEmptyBlock(IntBlock.newBlockBuilder(0).build()); - assertEmptyBlock(IntBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(IntVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(IntVector.newVectorBuilder(0).appendInt(randomInt()).build().filter().asBlock()); + assertEmptyBlock(blockFactory.newIntBlockBuilder(0).build()); + try (IntBlock toFilter = blockFactory.newIntBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newIntVectorBuilder(0).build().asBlock()); + try (IntVector toFilter = blockFactory.newIntVectorBuilder(0).appendInt(randomInt()).build()) { + assertEmptyBlock(toFilter.filter().asBlock()); + } } public void testEmptyLongBlock() throws IOException { - assertEmptyBlock(LongBlock.newBlockBuilder(0).build()); - assertEmptyBlock(LongBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(LongVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(LongVector.newVectorBuilder(0).appendLong(randomLong()).build().filter().asBlock()); + assertEmptyBlock(blockFactory.newLongBlockBuilder(0).build()); + try (LongBlock toFilter = blockFactory.newLongBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newLongVectorBuilder(0).build().asBlock()); + try (LongVector toFilter = blockFactory.newLongVectorBuilder(0).appendLong(randomLong()).build()) { + assertEmptyBlock(toFilter.filter().asBlock()); + } } public void testEmptyDoubleBlock() throws IOException { - assertEmptyBlock(DoubleBlock.newBlockBuilder(0).build()); - assertEmptyBlock(DoubleBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(DoubleVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(DoubleVector.newVectorBuilder(0).appendDouble(randomDouble()).build().filter().asBlock()); + assertEmptyBlock(blockFactory.newDoubleBlockBuilder(0).build()); + try (DoubleBlock toFilter = blockFactory.newDoubleBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newDoubleVectorBuilder(0).build().asBlock()); + try (DoubleVector toFilter = blockFactory.newDoubleVectorBuilder(0).appendDouble(randomDouble()).build()) { + 
assertEmptyBlock(toFilter.filter().asBlock()); + } } public void testEmptyBytesRefBlock() throws IOException { - assertEmptyBlock(BytesRefBlock.newBlockBuilder(0).build()); - assertEmptyBlock(BytesRefBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(BytesRefVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(BytesRefVector.newVectorBuilder(0).appendBytesRef(randomBytesRef()).build().filter().asBlock()); + assertEmptyBlock(blockFactory.newBytesRefBlockBuilder(0).build()); + try (BytesRefBlock toFilter = blockFactory.newBytesRefBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newBytesRefVectorBuilder(0).build().asBlock()); + try (BytesRefVector toFilter = blockFactory.newBytesRefVectorBuilder(0).appendBytesRef(randomBytesRef()).build()) { + assertEmptyBlock(toFilter.filter().asBlock()); + } } private void assertEmptyBlock(Block origBlock) throws IOException { assertThat(origBlock.getPositionCount(), is(0)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + try (origBlock; Block deserBlock = serializeDeserializeBlock(origBlock)) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); } } public void testFilterIntBlock() throws IOException { - assertFilterBlock(IntBlock.newBlockBuilder(0).appendInt(1).appendInt(2).build().filter(1)); - assertFilterBlock(IntBlock.newBlockBuilder(1).appendInt(randomInt()).appendNull().build().filter(0)); - assertFilterBlock(IntVector.newVectorBuilder(1).appendInt(randomInt()).build().filter(0).asBlock()); - assertFilterBlock(IntVector.newVectorBuilder(1).appendInt(randomInt()).appendInt(randomInt()).build().filter(0).asBlock()); + try (IntBlock toFilter = blockFactory.newIntBlockBuilder(0).appendInt(1).appendInt(2).build()) { + assertFilterBlock(toFilter.filter(1)); + } + try (IntBlock toFilter = blockFactory.newIntBlockBuilder(1).appendInt(randomInt()).appendNull().build()) { + assertFilterBlock(toFilter.filter(0)); + } + try (IntVector toFilter = blockFactory.newIntVectorBuilder(1).appendInt(randomInt()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } + try (IntVector toFilter = blockFactory.newIntVectorBuilder(1).appendInt(randomInt()).appendInt(randomInt()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } } public void testFilterLongBlock() throws IOException { - assertFilterBlock(LongBlock.newBlockBuilder(0).appendLong(1).appendLong(2).build().filter(1)); - assertFilterBlock(LongBlock.newBlockBuilder(1).appendLong(randomLong()).appendNull().build().filter(0)); - assertFilterBlock(LongVector.newVectorBuilder(1).appendLong(randomLong()).build().filter(0).asBlock()); - assertFilterBlock(LongVector.newVectorBuilder(1).appendLong(randomLong()).appendLong(randomLong()).build().filter(0).asBlock()); + try (LongBlock toFilter = blockFactory.newLongBlockBuilder(0).appendLong(1).appendLong(2).build()) { + assertFilterBlock(toFilter.filter(1)); + } + try (LongBlock toFilter = blockFactory.newLongBlockBuilder(1).appendLong(randomLong()).appendNull().build()) { + assertFilterBlock(toFilter.filter(0)); + } + try (LongVector toFilter = blockFactory.newLongVectorBuilder(1).appendLong(randomLong()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } + try (LongVector toFilter = blockFactory.newLongVectorBuilder(1).appendLong(randomLong()).appendLong(randomLong()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } } public void testFilterDoubleBlock() throws 
IOException { - assertFilterBlock(DoubleBlock.newBlockBuilder(0).appendDouble(1).appendDouble(2).build().filter(1)); - assertFilterBlock(DoubleBlock.newBlockBuilder(1).appendDouble(randomDouble()).appendNull().build().filter(0)); - assertFilterBlock(DoubleVector.newVectorBuilder(1).appendDouble(randomDouble()).build().filter(0).asBlock()); - assertFilterBlock( - DoubleVector.newVectorBuilder(1).appendDouble(randomDouble()).appendDouble(randomDouble()).build().filter(0).asBlock() - ); + try (DoubleBlock toFilter = blockFactory.newDoubleBlockBuilder(0).appendDouble(1).appendDouble(2).build()) { + assertFilterBlock(toFilter.filter(1)); + } + try (DoubleBlock toFilter = blockFactory.newDoubleBlockBuilder(1).appendDouble(randomDouble()).appendNull().build()) { + assertFilterBlock(toFilter.filter(0)); + } + try (DoubleVector toFilter = blockFactory.newDoubleVectorBuilder(1).appendDouble(randomDouble()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + + } + try ( + DoubleVector toFilter = blockFactory.newDoubleVectorBuilder(1).appendDouble(randomDouble()).appendDouble(randomDouble()).build() + ) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } } public void testFilterBytesRefBlock() throws IOException { - assertFilterBlock( - BytesRefBlock.newBlockBuilder(0) + try ( + BytesRefBlock toFilter = blockFactory.newBytesRefBlockBuilder(0) .appendBytesRef(randomBytesRef()) .appendBytesRef(randomBytesRef()) .build() - .filter(randomIntBetween(0, 1)) - ); - assertFilterBlock( - BytesRefBlock.newBlockBuilder(0).appendBytesRef(randomBytesRef()).appendNull().build().filter(randomIntBetween(0, 1)) - ); - assertFilterBlock(BytesRefVector.newVectorBuilder(0).appendBytesRef(randomBytesRef()).build().asBlock().filter(0)); - assertFilterBlock( - BytesRefVector.newVectorBuilder(0) + ) { + assertFilterBlock(toFilter.filter(randomIntBetween(0, 1))); + } + + try (BytesRefBlock toFilter = blockFactory.newBytesRefBlockBuilder(0).appendBytesRef(randomBytesRef()).appendNull().build()) { + assertFilterBlock(toFilter.filter(randomIntBetween(0, 1))); + } + + try (BytesRefVector toFilter = blockFactory.newBytesRefVectorBuilder(0).appendBytesRef(randomBytesRef()).build()) { + assertFilterBlock(toFilter.asBlock().filter(0)); + } + try ( + BytesRefVector toFilter = blockFactory.newBytesRefVectorBuilder(0) .appendBytesRef(randomBytesRef()) .appendBytesRef(randomBytesRef()) .build() - .asBlock() - .filter(randomIntBetween(0, 1)) - ); + ) { + assertFilterBlock(toFilter.asBlock().filter(randomIntBetween(0, 1))); + } } private void assertFilterBlock(Block origBlock) throws IOException { assertThat(origBlock.getPositionCount(), is(1)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + try (origBlock; Block deserBlock = serializeDeserializeBlock(origBlock)) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); assertThat(deserBlock.getPositionCount(), is(1)); } } public void testConstantNullBlock() throws IOException { - Block origBlock = new ConstantNullBlock(randomIntBetween(1, 8192)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + try (Block origBlock = blockFactory.newConstantNullBlock(randomIntBetween(1, 8192))) { + try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + } } } @@ -203,6 +253,6 @@ protected final BigArrays nonBreakingBigArrays() { * A {@link 
DriverContext} with a nonBreakingBigArrays.
     */
    protected DriverContext driverContext() {
        // TODO make this final and return a breaking block factory
-        return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance());
+        return new DriverContext(nonBreakingBigArrays(), TestBlockFactory.getNonBreakingInstance());
    }
 }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java
index b98049cd935fa..c0fc539cecc6c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java
@@ -14,17 +14,17 @@ public class BooleanBlockEqualityTests extends ESTestCase {
-    static final BlockFactory blockFactory = BlockFactory.getNonBreakingInstance();
+    static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
    public void testEmptyVector() {
        // all these "empty" vectors should be equivalent
        List<BooleanVector> vectors = List.of(
            blockFactory.newBooleanArrayVector(new boolean[] {}, 0),
            blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 0),
-            BooleanBlock.newConstantBlockWith(randomBoolean(), 0).asVector(),
-            BooleanBlock.newConstantBlockWith(randomBoolean(), 0).filter().asVector(),
-            BooleanBlock.newBlockBuilder(0).build().asVector(),
-            BooleanBlock.newBlockBuilder(0).appendBoolean(randomBoolean()).build().asVector().filter()
+            blockFactory.newConstantBooleanBlockWith(randomBoolean(), 0).asVector(),
+            blockFactory.newConstantBooleanBlockWith(randomBoolean(), 0).filter().asVector(),
+            blockFactory.newBooleanBlockBuilder(0).build().asVector(),
+            blockFactory.newBooleanBlockBuilder(0).appendBoolean(randomBoolean()).build().asVector().filter()
        );
        assertAllEquals(vectors);
    }
@@ -48,10 +48,10 @@ public void testEmptyBlock() {
                randomFrom(Block.MvOrdering.values()),
                blockFactory
            ),
-            BooleanBlock.newConstantBlockWith(randomBoolean(), 0),
-            BooleanBlock.newBlockBuilder(0).build(),
-            BooleanBlock.newBlockBuilder(0).appendBoolean(randomBoolean()).build().filter(),
-            BooleanBlock.newBlockBuilder(0).appendNull().build().filter()
+            blockFactory.newConstantBooleanBlockWith(randomBoolean(), 0),
+            blockFactory.newBooleanBlockBuilder(0).build(),
+            blockFactory.newBooleanBlockBuilder(0).appendBoolean(randomBoolean()).build().filter(),
+            blockFactory.newBooleanBlockBuilder(0).appendNull().build().filter()
        );
        assertAllEquals(blocks);
    }
@@ -66,9 +66,9 @@ public void testVectorEquality() {
            blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, false }, 4).filter(0, 1, 2),
            blockFactory.newBooleanArrayVector(new boolean[] { false, true, false, true }, 4).filter(1, 2, 3),
            blockFactory.newBooleanArrayVector(new boolean[] { true, true, false, true }, 4).filter(0, 2, 3),
-            BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().asVector(),
-            BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().asVector().filter(0, 1, 2),
-            BooleanBlock.newBlockBuilder(3)
+            blockFactory.newBooleanVectorBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build(),
+            blockFactory.newBooleanVectorBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().filter(0, 1, 2),
+            blockFactory.newBooleanBlockBuilder(3)
                .appendBoolean(true)
.appendBoolean(true) .appendBoolean(false) @@ -76,7 +76,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(true) .appendBoolean(false) @@ -96,10 +96,16 @@ public void testVectorEquality() { blockFactory.newBooleanArrayVector(new boolean[] { true, true, true, false }, 4).filter(0, 1, 2), blockFactory.newBooleanArrayVector(new boolean[] { false, true, true, true }, 4).filter(1, 2, 3), blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, true }, 4).filter(0, 2, 3), - BooleanBlock.newConstantBlockWith(true, 3).asVector(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().asVector(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().asVector().filter(0, 1, 2), - BooleanBlock.newBlockBuilder(3) + blockFactory.newConstantBooleanBlockWith(true, 3).asVector(), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().asVector(), + blockFactory.newBooleanBlockBuilder(3) + .appendBoolean(true) + .appendBoolean(true) + .appendBoolean(true) + .build() + .asVector() + .filter(0, 1, 2), + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(false) .appendBoolean(true) @@ -107,7 +113,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(false) .appendBoolean(true) @@ -143,16 +149,16 @@ public void testBlockEquality() { blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, false }, 3).filter(0, 1, 2).asBlock(), blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, false }, 4).filter(0, 1, 2).asBlock(), blockFactory.newBooleanArrayVector(new boolean[] { true, false, false, true }, 4).filter(0, 1, 3).asBlock(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().filter(0, 1, 2), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build(), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().filter(0, 1, 2), + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(true) .appendBoolean(false) .appendBoolean(true) .build() .filter(0, 2, 3), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendNull() .appendBoolean(false) @@ -185,11 +191,11 @@ public void testBlockEquality() { blockFactory.newBooleanArrayVector(new boolean[] { true, true, false }, 2).filter(0, 1).asBlock(), blockFactory.newBooleanArrayVector(new boolean[] { true, true, false }, 3).filter(0, 1).asBlock(), blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3).filter(0, 2).asBlock(), - BooleanBlock.newConstantBlockWith(true, 2), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendBoolean(true).build(), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendBoolean(true).build().filter(0, 1), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().filter(0, 2), - 
BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendNull().appendBoolean(true).build().filter(0, 2) + blockFactory.newConstantBooleanBlockWith(true, 2), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendBoolean(true).build(), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendBoolean(true).build().filter(0, 1), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().filter(0, 2), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendNull().appendBoolean(true).build().filter(0, 2) ); assertAllEquals(moreBlocks); } @@ -202,10 +208,10 @@ public void testVectorInequality() { blockFactory.newBooleanArrayVector(new boolean[] { true, false }, 2), blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3), blockFactory.newBooleanArrayVector(new boolean[] { false, true, false }, 3), - BooleanBlock.newConstantBlockWith(true, 2).asVector(), - BooleanBlock.newBlockBuilder(2).appendBoolean(false).appendBoolean(true).build().asVector(), - BooleanBlock.newBlockBuilder(3).appendBoolean(false).appendBoolean(false).appendBoolean(true).build().asVector(), - BooleanBlock.newBlockBuilder(1) + blockFactory.newConstantBooleanBlockWith(true, 2).asVector(), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(false).appendBoolean(true).build().asVector(), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(false).appendBoolean(false).appendBoolean(true).build().asVector(), + blockFactory.newBooleanBlockBuilder(1) .appendBoolean(false) .appendBoolean(false) .appendBoolean(false) @@ -224,13 +230,23 @@ public void testBlockInequality() { blockFactory.newBooleanArrayVector(new boolean[] { false, true }, 2).asBlock(), blockFactory.newBooleanArrayVector(new boolean[] { false, true, false }, 3).asBlock(), blockFactory.newBooleanArrayVector(new boolean[] { false, false, true }, 3).asBlock(), - BooleanBlock.newConstantBlockWith(true, 2), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendBoolean(false).appendBoolean(true).appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendNull().build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendNull().appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).beginPositionEntry().appendBoolean(false).appendBoolean(false).build() + blockFactory.newConstantBooleanBlockWith(true, 2), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(false).build(), + blockFactory.newBooleanBlockBuilder(1) + .appendBoolean(true) + .appendBoolean(false) + .appendBoolean(true) + .appendBoolean(false) + .build(), + blockFactory.newBooleanBlockBuilder(1).appendBoolean(true).appendNull().build(), + blockFactory.newBooleanBlockBuilder(1).appendBoolean(true).appendNull().appendBoolean(false).build(), + blockFactory.newBooleanBlockBuilder(1).appendBoolean(true).appendBoolean(false).build(), + blockFactory.newBooleanBlockBuilder(3) + .appendBoolean(true) + .beginPositionEntry() + .appendBoolean(false) + .appendBoolean(false) + .build() ); assertAllNotEquals(notEqualBlocks); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java
index 5c8b5c8538349..ec740db329c74 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java
@@ -22,7 +22,7 @@ public class BytesRefBlockEqualityTests extends ComputeTestCase {
    final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService());
-    final BlockFactory blockFactory = BlockFactory.getNonBreakingInstance();
+    final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
    public void testEmptyVector() {
        // all these "empty" vectors should be equivalent
@@ -30,10 +30,10 @@ public void testEmptyVector() {
        List<BytesRefVector> vectors = List.of(
            new BytesRefArrayVector(bytesRefArray1, 0, blockFactory),
            new BytesRefArrayVector(bytesRefArray2, 0, blockFactory),
-            BytesRefBlock.newConstantBlockWith(new BytesRef(), 0).asVector(),
-            BytesRefBlock.newConstantBlockWith(new BytesRef(), 0).filter().asVector(),
-            BytesRefBlock.newBlockBuilder(0).build().asVector(),
-            BytesRefBlock.newBlockBuilder(0).appendBytesRef(new BytesRef()).build().asVector().filter()
+            blockFactory.newConstantBytesRefBlockWith(new BytesRef(), 0).asVector(),
+            blockFactory.newConstantBytesRefBlockWith(new BytesRef(), 0).filter().asVector(),
+            blockFactory.newBytesRefBlockBuilder(0).build().asVector(),
+            blockFactory.newBytesRefBlockBuilder(0).appendBytesRef(new BytesRef()).build().asVector().filter()
        );
        assertAllEquals(vectors);
    }
@@ -59,10 +59,10 @@ public void testEmptyBlock() {
                randomFrom(Block.MvOrdering.values()),
                blockFactory
            ),
-            BytesRefBlock.newConstantBlockWith(new BytesRef(), 0),
-            BytesRefBlock.newBlockBuilder(0).build(),
-            BytesRefBlock.newBlockBuilder(0).appendBytesRef(new BytesRef()).build().filter(),
-            BytesRefBlock.newBlockBuilder(0).appendNull().build().filter()
+            blockFactory.newConstantBytesRefBlockWith(new BytesRef(), 0),
+            blockFactory.newBytesRefBlockBuilder(0).build(),
+            blockFactory.newBytesRefBlockBuilder(0).appendBytesRef(new BytesRef()).build().filter(),
+            blockFactory.newBytesRefBlockBuilder(0).appendNull().build().filter()
        );
        assertAllEquals(blocks);
    }
@@ -77,20 +77,20 @@ public void testVectorEquality() {
            new BytesRefArrayVector(bytesRefArray2, 3, blockFactory),
            new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).filter(0, 1, 2),
            new BytesRefArrayVector(bytesRefArray2, 4, blockFactory).filter(0, 1, 2),
-            BytesRefBlock.newBlockBuilder(3)
+            blockFactory.newBytesRefBlockBuilder(3)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("2"))
                .appendBytesRef(new BytesRef("3"))
                .build()
                .asVector(),
-            BytesRefBlock.newBlockBuilder(3)
+            blockFactory.newBytesRefBlockBuilder(3)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("2"))
                .appendBytesRef(new BytesRef("3"))
                .build()
                .asVector()
                .filter(0, 1, 2),
-            BytesRefBlock.newBlockBuilder(3)
+            blockFactory.newBytesRefBlockBuilder(3)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("4"))
                .appendBytesRef(new BytesRef("2"))
@@ -98,7 +98,7 @@ public void testVectorEquality() {
                .build()
                .filter(0, 2, 3)
                .asVector(),
-            BytesRefBlock.newBlockBuilder(3)
+            blockFactory.newBytesRefBlockBuilder(3)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("4"))
                .appendBytesRef(new BytesRef("2"))
@@ -118,21 +118,21 @@ public void testVectorEquality() {
            new BytesRefArrayVector(bytesRefArray2, 3,
blockFactory), new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).filter(0, 1, 2), new BytesRefArrayVector(bytesRefArray2, 4, blockFactory).filter(0, 1, 2), - BytesRefBlock.newConstantBlockWith(new BytesRef("1"), 3).asVector(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newConstantBytesRefBlockWith(new BytesRef("1"), 3).asVector(), + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .build() .asVector(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .build() .asVector() .filter(0, 1, 2), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("1")) @@ -140,7 +140,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("1")) @@ -177,25 +177,25 @@ public void testBlockEquality() { new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).filter(0, 1, 2).asBlock(), new BytesRefArrayVector(bytesRefArray2, 3, blockFactory).filter(0, 1, 2).asBlock(), new BytesRefArrayVector(bytesRefArray2, 4, blockFactory).filter(0, 1, 2).asBlock(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build() .filter(0, 1, 2), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build() .filter(0, 2, 3), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendNull() .appendBytesRef(new BytesRef("2")) @@ -229,16 +229,20 @@ public void testBlockEquality() { new BytesRefArrayVector(bytesRefArray1, 2, blockFactory).filter(0, 1).asBlock(), new BytesRefArrayVector(bytesRefArray2, 2, blockFactory).filter(0, 1).asBlock(), new BytesRefArrayVector(bytesRefArray2, 3, blockFactory).filter(0, 1).asBlock(), - BytesRefBlock.newConstantBlockWith(new BytesRef("9"), 2), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("9")).appendBytesRef(new BytesRef("9")).build(), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("9")).appendBytesRef(new BytesRef("9")).build().filter(0, 1), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newConstantBytesRefBlockWith(new BytesRef("9"), 2), + blockFactory.newBytesRefBlockBuilder(2).appendBytesRef(new BytesRef("9")).appendBytesRef(new BytesRef("9")).build(), + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("9")) + .appendBytesRef(new BytesRef("9")) + .build() + .filter(0, 1), + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("9")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("9")) .build() .filter(0, 2), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("9")) 
                .appendNull()
                .appendBytesRef(new BytesRef("9"))
@@ -264,20 +268,20 @@ public void testVectorInequality() {
            new BytesRefArrayVector(bytesRefArray3, 2, blockFactory),
            new BytesRefArrayVector(bytesRefArray4, 3, blockFactory),
            new BytesRefArrayVector(bytesRefArray5, 3, blockFactory),
-            BytesRefBlock.newConstantBlockWith(new BytesRef("9"), 2).asVector(),
-            BytesRefBlock.newBlockBuilder(2)
+            blockFactory.newConstantBytesRefBlockWith(new BytesRef("9"), 2).asVector(),
+            blockFactory.newBytesRefBlockBuilder(2)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("2"))
                .build()
                .asVector()
                .filter(1),
-            BytesRefBlock.newBlockBuilder(3)
+            blockFactory.newBytesRefBlockBuilder(3)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("2"))
                .appendBytesRef(new BytesRef("5"))
                .build()
                .asVector(),
-            BytesRefBlock.newBlockBuilder(1)
+            blockFactory.newBytesRefBlockBuilder(1)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("2"))
                .appendBytesRef(new BytesRef("3"))
@@ -304,22 +308,30 @@ public void testBlockInequality() {
            new BytesRefArrayVector(bytesRefArray3, 2, blockFactory).asBlock(),
            new BytesRefArrayVector(bytesRefArray4, 3, blockFactory).asBlock(),
            new BytesRefArrayVector(bytesRefArray5, 3, blockFactory).asBlock(),
-            BytesRefBlock.newConstantBlockWith(new BytesRef("9"), 2),
-            BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("1")).appendBytesRef(new BytesRef("2")).build().filter(1),
-            BytesRefBlock.newBlockBuilder(3)
+            blockFactory.newConstantBytesRefBlockWith(new BytesRef("9"), 2),
+            blockFactory.newBytesRefBlockBuilder(2)
+                .appendBytesRef(new BytesRef("1"))
+                .appendBytesRef(new BytesRef("2"))
+                .build()
+                .filter(1),
+            blockFactory.newBytesRefBlockBuilder(3)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("2"))
                .appendBytesRef(new BytesRef("5"))
                .build(),
-            BytesRefBlock.newBlockBuilder(1)
+            blockFactory.newBytesRefBlockBuilder(1)
                .appendBytesRef(new BytesRef("1"))
                .appendBytesRef(new BytesRef("2"))
                .appendBytesRef(new BytesRef("3"))
                .appendBytesRef(new BytesRef("4"))
                .build(),
-            BytesRefBlock.newBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendNull().build(),
-            BytesRefBlock.newBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build(),
-            BytesRefBlock.newBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendBytesRef(new BytesRef("3")).build()
+            blockFactory.newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendNull().build(),
+            blockFactory.newBytesRefBlockBuilder(1)
+                .appendBytesRef(new BytesRef("1"))
+                .appendNull()
+                .appendBytesRef(new BytesRef("3"))
+                .build(),
+            blockFactory.newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendBytesRef(new BytesRef("3")).build()
        );
        assertAllNotEquals(notEqualBlocks);
    }
@@ -327,8 +339,12 @@ public void testBlockInequality() {
    public void testSimpleBlockWithSingleNull() {
        List<BytesRefBlock> blocks = List.of(
-            BytesRefBlock.newBlockBuilder(3).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build(),
-            BytesRefBlock.newBlockBuilder(3).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build()
+            blockFactory.newBytesRefBlockBuilder(3)
+                .appendBytesRef(new BytesRef("1"))
+                .appendNull()
+                .appendBytesRef(new BytesRef("3"))
+                .build(),
+            blockFactory.newBytesRefBlockBuilder(3).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build()
        );
        assertEquals(3, blocks.get(0).getPositionCount());
        assertTrue(blocks.get(0).isNull(1));
@@ -338,8 +354,8 @@ public void testSimpleBlockWithSingleNull() {
    public void testSimpleBlockWithManyNulls() {
        int positions = randomIntBetween(1, 256);
        boolean grow = randomBoolean();
-        BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions);
-        BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions);
+        BytesRefBlock.Builder builder1 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions);
+        BytesRefBlock.Builder builder2 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions);
        for (int p = 0; p < positions; p++) {
            builder1.appendNull();
            builder2.appendNull();
@@ -356,12 +372,12 @@ public void testSimpleBlockWithManyNulls() {
    public void testSimpleBlockWithSingleMultiValue() {
        List<BytesRefBlock> blocks = List.of(
-            BytesRefBlock.newBlockBuilder(1)
+            blockFactory.newBytesRefBlockBuilder(1)
                .beginPositionEntry()
                .appendBytesRef(new BytesRef("1a"))
                .appendBytesRef(new BytesRef("2b"))
                .build(),
-            BytesRefBlock.newBlockBuilder(1)
+            blockFactory.newBytesRefBlockBuilder(1)
                .beginPositionEntry()
                .appendBytesRef(new BytesRef("1a"))
                .appendBytesRef(new BytesRef("2b"))
@@ -375,9 +391,9 @@ public void testSimpleBlockWithSingleMultiValue() {
    public void testSimpleBlockWithManyMultiValues() {
        int positions = randomIntBetween(1, 256);
        boolean grow = randomBoolean();
-        BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions);
-        BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions);
-        BytesRefBlock.Builder builder3 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions);
+        BytesRefBlock.Builder builder1 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions);
+        BytesRefBlock.Builder builder2 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions);
+        BytesRefBlock.Builder builder3 = blockFactory.newBytesRefBlockBuilder(grow ?
0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index 6a2585f8f4d98..1c14683e178b8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.operator.ComputeTestCase; import org.elasticsearch.core.Releasables; @@ -36,20 +37,24 @@ public void testNonDecreasingSetFalse() { } public void testNonDecreasingNonConstantShard() { - DocVector docs = new DocVector(intRange(0, 2), IntBlock.newConstantBlockWith(0, 2).asVector(), intRange(0, 2), null); + BlockFactory blockFactory = blockFactory(); + DocVector docs = new DocVector(intRange(0, 2), blockFactory.newConstantIntVector(0, 2), intRange(0, 2), null); assertFalse(docs.singleSegmentNonDecreasing()); + docs.close(); } public void testNonDecreasingNonConstantSegment() { - DocVector docs = new DocVector(IntBlock.newConstantBlockWith(0, 2).asVector(), intRange(0, 2), intRange(0, 2), null); + BlockFactory blockFactory = blockFactory(); + DocVector docs = new DocVector(blockFactory.newConstantIntVector(0, 2), intRange(0, 2), intRange(0, 2), null); assertFalse(docs.singleSegmentNonDecreasing()); + docs.close(); } public void testNonDecreasingDescendingDocs() { BlockFactory blockFactory = blockFactory(); DocVector docs = new DocVector( - IntBlock.newConstantBlockWith(0, 2).asVector(), - IntBlock.newConstantBlockWith(0, 2).asVector(), + blockFactory.newConstantIntVector(0, 2), + blockFactory.newConstantIntVector(0, 2), blockFactory.newIntArrayVector(new int[] { 1, 0 }, 2), null ); @@ -104,7 +109,7 @@ public void testRandomShardSegmentDocMap() { private void assertShardSegmentDocMap(int[][] data, int[][] expected) { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - try (DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length, blockFactory)) { + try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, data.length)) { for (int r = 0; r < data.length; r++) { builder.appendShard(data[r][0]); builder.appendSegment(data[r][1]); @@ -137,7 +142,8 @@ private void assertShardSegmentDocMap(int[][] data, int[][] expected) { } public void testCannotDoubleRelease() { - var block = new DocVector(intRange(0, 2), IntBlock.newConstantBlockWith(0, 2).asVector(), intRange(0, 2), null).asBlock(); + BlockFactory blockFactory = blockFactory(); + var block = new DocVector(intRange(0, 2), blockFactory.newConstantIntBlockWith(0, 2).asVector(), intRange(0, 2), null).asBlock(); assertThat(block.isReleased(), is(false)); Page page = new Page(block); @@ -145,7 +151,7 @@ public void testCannotDoubleRelease() { assertThat(block.isReleased(), is(true)); Exception e = expectThrows(IllegalStateException.class, () -> block.close()); - assertThat(e.getMessage(), containsString("can't release already released block")); + assertThat(e.getMessage(), containsString("can't release already released object")); e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); 
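// ----------------------------------------------------------------------------
// Editorial aside (not part of the patch): the assertions around this point pin
// down the lifecycle contract the whole migration leans on. Blocks and vectors
// obtained from a BlockFactory are accounted against its circuit breaker and
// must be released exactly once; a double release fails fast rather than
// silently corrupting the breaker's byte count. Below is a minimal,
// self-contained sketch of that contract -- the class name and main() framing
// are illustrative only, but every API it touches appears elsewhere in this
// diff (BlockFactory.getInstance, newConstantIntVector, newIntArrayVector,
// DocVector, close).
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.DocVector;

class DocVectorReleaseSketch {
    public static void main(String[] args) {
        // A factory whose breaker tracks allocations but never trips, as in the benchmarks.
        BlockFactory factory = BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE);
        DocVector docs = new DocVector(
            factory.newConstantIntVector(0, 10), // shard id for each position
            factory.newConstantIntVector(0, 10), // segment id for each position
            factory.newIntArrayVector(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 10), // doc ids
            false // don't promise single-segment non-decreasing order
        );
        docs.close(); // returns the three backing vectors' bytes to the breaker
        // A second docs.close() would throw
        // IllegalStateException: "can't release already released object".
    }
}
// ----------------------------------------------------------------------------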
assertThat(e.getMessage(), containsString("can't read released block"));
@@ -155,17 +161,55 @@
    }
    public void testRamBytesUsedWithout() {
+        BlockFactory blockFactory = blockFactory();
        DocVector docs = new DocVector(
-            IntBlock.newConstantBlockWith(0, 1).asVector(),
-            IntBlock.newConstantBlockWith(0, 1).asVector(),
-            IntBlock.newConstantBlockWith(0, 1).asVector(),
+            blockFactory.newConstantIntBlockWith(0, 1).asVector(),
+            blockFactory.newConstantIntBlockWith(0, 1).asVector(),
+            blockFactory.newConstantIntBlockWith(0, 1).asVector(),
            false
        );
        assertThat(docs.singleSegmentNonDecreasing(), is(false));
        docs.ramBytesUsed(); // ensure non-singleSegmentNonDecreasing handles nulls in ramByteUsed
+        docs.close();
+    }
+
+    public void testFilter() {
+        BlockFactory factory = blockFactory();
+        try (
+            DocVector docs = new DocVector(
+                factory.newConstantIntVector(0, 10),
+                factory.newConstantIntVector(0, 10),
+                factory.newIntArrayVector(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 10),
+                false
+            );
+            DocVector filtered = docs.filter(1, 2, 3);
+            DocVector expected = new DocVector(
+                factory.newConstantIntVector(0, 3),
+                factory.newConstantIntVector(0, 3),
+                factory.newIntArrayVector(new int[] { 1, 2, 3 }, 3),
+                false
+            );
+        ) {
+            assertThat(filtered, equalTo(expected));
+        }
+    }
+
+    public void testFilterBreaks() {
+        BlockFactory factory = blockFactory(ByteSizeValue.ofBytes(between(160, 280)));
+        try (
+            DocVector docs = new DocVector(
+                factory.newConstantIntVector(0, 10),
+                factory.newConstantIntVector(0, 10),
+                factory.newIntArrayVector(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 10),
+                false
+            )
+        ) {
+            Exception e = expectThrows(CircuitBreakingException.class, () -> docs.filter(1, 2, 3));
+            assertThat(e.getMessage(), equalTo("over test limit"));
+        }
    }
    IntVector intRange(int startInclusive, int endExclusive) {
-        return IntVector.range(startInclusive, endExclusive, BlockFactory.getNonBreakingInstance());
+        return IntVector.range(startInclusive, endExclusive, TestBlockFactory.getNonBreakingInstance());
    }
 }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java
index 03ae026f4f539..e8f8fbcbf1c4c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java
@@ -8,53 +8,52 @@
 package org.elasticsearch.compute.data;
 import org.elasticsearch.compute.operator.ComputeTestCase;
+import org.elasticsearch.core.Releasables;
 import java.util.BitSet;
 import java.util.List;
 public class DoubleBlockEqualityTests extends ComputeTestCase {
-    static final BlockFactory blockFactory = BlockFactory.getNonBreakingInstance();
+    static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
    public void testEmptyVector() {
        // all these "empty" vectors should be equivalent
        List<DoubleVector> vectors = List.of(
            blockFactory.newDoubleArrayVector(new double[] {}, 0),
            blockFactory.newDoubleArrayVector(new double[] { 0 }, 0),
-            DoubleBlock.newConstantBlockWith(0, 0).asVector(),
-            DoubleBlock.newConstantBlockWith(0, 0).filter().asVector(),
-            DoubleBlock.newBlockBuilder(0).build().asVector(),
-            DoubleBlock.newBlockBuilder(0).appendDouble(1).build().asVector().filter()
+            blockFactory.newConstantDoubleVector(0, 0),
+            blockFactory.newConstantDoubleBlockWith(0, 0).filter().asVector(),
+            blockFactory.newDoubleBlockBuilder(0).build().asVector(),
+            blockFactory.newDoubleBlockBuilder(0).appendDouble(1).build().asVector().filter()
        );
        assertAllEquals(vectors);
    }
    public void testEmptyBlock() {
-        BlockFactory blockFactory = blockFactory();
        // all these "empty" vectors should be equivalent
        List<DoubleBlock> blocks = List.of(
-            new DoubleArrayBlock(
-                new double[] {},
-                0,
-                new int[] {},
-                BitSet.valueOf(new byte[] { 0b00 }),
-                randomFrom(Block.MvOrdering.values()),
-                blockFactory
+            blockFactory.newDoubleArrayBlock(
+                new double[] {},
+                0,
+                new int[] {},
+                BitSet.valueOf(new byte[] { 0b00 }),
+                randomFrom(Block.MvOrdering.values())
            ),
-            new DoubleArrayBlock(
-                new double[] { 0 },
-                0,
-                new int[] {},
-                BitSet.valueOf(new byte[] { 0b00 }),
-                randomFrom(Block.MvOrdering.values()),
-                blockFactory
+            blockFactory.newDoubleArrayBlock(
+                new double[] { 0 },
+                0,
+                new int[] {},
+                BitSet.valueOf(new byte[] { 0b00 }),
+                randomFrom(Block.MvOrdering.values())
            ),
-            DoubleBlock.newConstantBlockWith(0, 0),
-            DoubleBlock.newBlockBuilder(0).build(),
-            DoubleBlock.newBlockBuilder(0).appendDouble(1).build().filter(),
-            DoubleBlock.newBlockBuilder(0).appendNull().build().filter()
+            blockFactory.newConstantDoubleBlockWith(0, 0),
+            blockFactory.newDoubleBlockBuilder(0).build(),
+            blockFactory.newDoubleBlockBuilder(0).appendDouble(1).build().filter(),
+            blockFactory.newDoubleBlockBuilder(0).appendNull().build().filter()
        );
        assertAllEquals(blocks);
+        Releasables.close(blocks);
    }
    public void testVectorEquality() {
@@ -67,9 +66,9 @@ public void testVectorEquality() {
            blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2),
            blockFactory.newDoubleArrayVector(new double[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3),
            blockFactory.newDoubleArrayVector(new double[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3),
-            DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector(),
-            DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector().filter(0, 1, 2),
-            DoubleBlock.newBlockBuilder(3)
+            blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector(),
+            blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector().filter(0, 1, 2),
+            blockFactory.newDoubleBlockBuilder(3)
                .appendDouble(1)
                .appendDouble(4)
                .appendDouble(2)
@@ -77,7 +76,7 @@ public void testVectorEquality() {
                .build()
                .filter(0, 2, 3)
                .asVector(),
-
DoubleBlock.newBlockBuilder(3) + blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(1) @@ -144,10 +143,10 @@ public void testBlockEquality() { blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().filter(0, 1, 2), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(4).appendDouble(2).appendDouble(3).build().filter(0, 2, 3), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendNull().appendDouble(2).appendDouble(3).build().filter(0, 2, 3) + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().filter(0, 1, 2), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(4).appendDouble(2).appendDouble(3).build().filter(0, 2, 3), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendNull().appendDouble(2).appendDouble(3).build().filter(0, 2, 3) ); assertAllEquals(blocks); @@ -174,11 +173,11 @@ public void testBlockEquality() { blockFactory.newDoubleArrayVector(new double[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), - DoubleBlock.newConstantBlockWith(9, 2), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendDouble(9).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendDouble(9).build().filter(0, 1), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendDouble(4).appendDouble(9).build().filter(0, 2), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendNull().appendDouble(9).build().filter(0, 2) + blockFactory.newConstantDoubleBlockWith(9, 2), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(9).build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(9).build().filter(0, 1), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(4).appendDouble(9).build().filter(0, 2), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendNull().appendDouble(9).build().filter(0, 2) ); assertAllEquals(moreBlocks); } @@ -191,10 +190,10 @@ public void testVectorInequality() { blockFactory.newDoubleArrayVector(new double[] { 1, 2 }, 2), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4 }, 3), - DoubleBlock.newConstantBlockWith(9, 2).asVector(), - DoubleBlock.newBlockBuilder(2).appendDouble(1).appendDouble(2).build().asVector().filter(1), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build().asVector(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build().asVector() + blockFactory.newConstantDoubleBlockWith(9, 2).asVector(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(1).appendDouble(2).build().asVector().filter(1), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build().asVector(), + 
blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build().asVector() ); assertAllNotEquals(notEqualVectors); } @@ -207,22 +206,22 @@ public void testBlockInequality() { blockFactory.newDoubleArrayVector(new double[] { 1, 2 }, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4 }, 3).asBlock(), - DoubleBlock.newConstantBlockWith(9, 2), - DoubleBlock.newBlockBuilder(2).appendDouble(1).appendDouble(2).build().filter(1), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendNull().build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendNull().appendDouble(3).build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendDouble(3).build(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).beginPositionEntry().appendDouble(2).appendDouble(3).build() + blockFactory.newConstantDoubleBlockWith(9, 2), + blockFactory.newDoubleBlockBuilder(2).appendDouble(1).appendDouble(2).build().filter(1), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendNull().build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendNull().appendDouble(3).build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(3).build(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).beginPositionEntry().appendDouble(2).appendDouble(3).build() ); assertAllNotEquals(notEqualBlocks); } public void testSimpleBlockWithSingleNull() { List blocks = List.of( - DoubleBlock.newBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build(), - DoubleBlock.newBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build() + blockFactory.newDoubleBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); @@ -232,8 +231,8 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); - DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder1 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = blockFactory.newDoubleBlockBuilder(grow ? 
0 : positions);
         for (int p = 0; p < positions; p++) {
             builder1.appendNull();
             builder2.appendNull();
@@ -250,8 +249,8 @@ public void testSimpleBlockWithManyNulls() {
 
     public void testSimpleBlockWithSingleMultiValue() {
         List<DoubleBlock> blocks = List.of(
-            DoubleBlock.newBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build(),
-            DoubleBlock.newBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build()
+            blockFactory.newDoubleBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build(),
+            blockFactory.newDoubleBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build()
         );
         assert blocks.get(0).getPositionCount() == 1 && blocks.get(0).getValueCount(0) == 2;
         assertAllEquals(blocks);
@@ -260,9 +259,9 @@ public void testSimpleBlockWithSingleMultiValue() {
     public void testSimpleBlockWithManyMultiValues() {
         int positions = randomIntBetween(1, 256);
         boolean grow = randomBoolean();
-        DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions);
-        DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions);
-        DoubleBlock.Builder builder3 = DoubleBlock.newBlockBuilder(grow ? 0 : positions);
+        DoubleBlock.Builder builder1 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions);
+        DoubleBlock.Builder builder2 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions);
+        DoubleBlock.Builder builder3 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions);
         for (int pos = 0; pos < positions; pos++) {
             builder1.beginPositionEntry();
             builder2.beginPositionEntry();
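Taken together, the equality-test changes above and below are one mechanical migration: block and vector construction moves off the per-type static entry points and onto a BlockFactory, so the factory's circuit breaker sees every allocation. A condensed sketch of the before/after shape (hypothetical values; it assumes a blockFactory field like the ones these tests declare):

    // Before: static entry point, not tied to any factory or circuit breaker.
    DoubleBlock before = DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build();

    // After: the factory creates the builder, so the allocation is accounted for.
    DoubleBlock after = blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build();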
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java
index e4846c59376bf..dc78b3715d12a 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java
@@ -288,7 +288,10 @@ public void testFilterToStringMultiValue() {
         var filter = block.filter(0, 1);
         assertThat(
             filter.toString(),
-            containsString("BooleanArrayBlock[positions=2, mvOrdering=UNORDERED, values=[true, true, false, false]]")
+            containsString(
+                "BooleanArrayBlock[positions=2, mvOrdering=UNORDERED, "
+                    + "vector=BooleanArrayVector[positions=4, values=[true, true, false, false]]]"
+            )
         );
         Releasables.close(builder, block);
         releaseAndAssertBreaker(filter);
@@ -300,7 +303,12 @@ public void testFilterToStringMultiValue() {
         builder.beginPositionEntry().appendInt(90).appendInt(1000).endPositionEntry();
         var block = builder.build();
         var filter = block.filter(0, 1);
-        assertThat(filter.toString(), containsString("IntArrayBlock[positions=2, mvOrdering=UNORDERED, values=[0, 10, 20, 50]]"));
+        assertThat(
+            filter.toString(),
+            containsString(
+                "IntArrayBlock[positions=2, mvOrdering=UNORDERED, vector=IntArrayVector[positions=4, values=[0, 10, 20, 50]]]"
+            )
+        );
         Releasables.close(builder, block);
         releaseAndAssertBreaker(filter);
     }
@@ -311,7 +319,12 @@ public void testFilterToStringMultiValue() {
         builder.beginPositionEntry().appendLong(90).appendLong(1000).endPositionEntry();
         var block = builder.build();
         var filter = block.filter(0, 1);
-        assertThat(filter.toString(), containsString("LongArrayBlock[positions=2, mvOrdering=UNORDERED, values=[0, 10, 20, 50]]"));
+        assertThat(
+            filter.toString(),
+            containsString(
+                "LongArrayBlock[positions=2, mvOrdering=UNORDERED, vector=LongArrayVector[positions=4, values=[0, 10, 20, 50]]]"
+            )
+        );
         Releasables.close(builder, block);
         releaseAndAssertBreaker(filter);
     }
@@ -324,7 +337,10 @@ public void testFilterToStringMultiValue() {
         var filter = block.filter(0, 1);
         assertThat(
             filter.toString(),
-            containsString("DoubleArrayBlock[positions=2, mvOrdering=UNORDERED, values=[0.0, 10.0, 0.002, 1.0E9]]")
+            containsString(
+                "DoubleArrayBlock[positions=2, mvOrdering=UNORDERED, "
+                    + "vector=DoubleArrayVector[positions=4, values=[0.0, 10.0, 0.002, 1.0E9]]]"
+            )
         );
         Releasables.close(builder, block);
         releaseAndAssertBreaker(filter);
@@ -338,7 +354,10 @@ public void testFilterToStringMultiValue() {
         builder.beginPositionEntry().appendBytesRef(new BytesRef("pig")).appendBytesRef(new BytesRef("chicken")).endPositionEntry();
         var block = builder.build();
         var filter = block.filter(0, 1);
-        assertThat(filter.toString(), containsString("BytesRefArrayBlock[positions=2, mvOrdering=UNORDERED, values=4]"));
+        assertThat(
+            filter.toString(),
+            containsString("BytesRefArrayBlock[positions=2, mvOrdering=UNORDERED, vector=BytesRefArrayVector[positions=4]]")
+        );
         assertThat(filter.getPositionCount(), equalTo(2));
         Releasables.close(builder, block);
         releaseAndAssertBreaker(filter);
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java
index 1ee07386e191f..6c1be6231e82c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java
@@ -14,17 +14,17 @@
 
 public class IntBlockEqualityTests extends ComputeTestCase {
 
-    static final BlockFactory blockFactory = BlockFactory.getNonBreakingInstance();
+    static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
 
     public void testEmptyVector() {
         // all these "empty" vectors should be equivalent
         List<IntVector> vectors = List.of(
             blockFactory.newIntArrayVector(new int[] {}, 0),
             blockFactory.newIntArrayVector(new int[] { 0 }, 0),
-            IntBlock.newConstantBlockWith(0, 0).asVector(),
-            IntBlock.newConstantBlockWith(0, 0).filter().asVector(),
-            IntBlock.newBlockBuilder(0).build().asVector(),
-            IntBlock.newBlockBuilder(0).appendInt(1).build().asVector().filter()
+            blockFactory.newConstantIntVector(0, 0),
+            blockFactory.newConstantIntVector(0, 0).filter(),
+            blockFactory.newIntBlockBuilder(0).build().asVector(),
+            blockFactory.newIntBlockBuilder(0).appendInt(1).build().asVector().filter()
         );
         assertAllEquals(vectors);
     }
@@ -46,10 +46,10 @@ public void testEmptyBlock() {
                 BitSet.valueOf(new byte[] { 0b00 }),
                 randomFrom(Block.MvOrdering.values())
             ),
-            IntBlock.newConstantBlockWith(0, 0),
-            IntBlock.newBlockBuilder(0).build(),
-            IntBlock.newBlockBuilder(0).appendInt(1).build().filter(),
-            IntBlock.newBlockBuilder(0).appendNull().build().filter()
+            blockFactory.newConstantIntBlockWith(0, 0),
+            blockFactory.newIntBlockBuilder(0).build(),
+            blockFactory.newIntBlockBuilder(0).appendInt(1).build().filter(),
+            blockFactory.newIntBlockBuilder(0).appendNull().build().filter()
         );
         assertAllEquals(blocks);
     }
@@ -64,10 +64,10 @@ public void testVectorEquality() {
             blockFactory.newIntArrayVector(new int[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2),
             blockFactory.newIntArrayVector(new int[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3),
             blockFactory.newIntArrayVector(new int[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3),
-
IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector().filter(0, 1, 2), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3).asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().asVector().filter(0, 2, 3) + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector().filter(0, 1, 2), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3).asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().asVector().filter(0, 2, 3) ); assertAllEquals(vectors); @@ -80,11 +80,11 @@ public void testVectorEquality() { blockFactory.newIntArrayVector(new int[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), blockFactory.newIntArrayVector(new int[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), blockFactory.newIntArrayVector(new int[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), - IntBlock.newConstantBlockWith(1, 3).asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector().filter(0, 1, 2), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().filter(0, 2, 3).asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().asVector().filter(0, 2, 3) + blockFactory.newConstantIntBlockWith(1, 3).asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector().filter(0, 1, 2), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().filter(0, 2, 3).asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().asVector().filter(0, 2, 3) ); assertAllEquals(moreVectors); } @@ -113,10 +113,10 @@ public void testBlockEquality() { new IntArrayVector(new int[] { 1, 2, 3, 4 }, 3, blockFactory).filter(0, 1, 2).asBlock(), new IntArrayVector(new int[] { 1, 2, 3, 4 }, 4, blockFactory).filter(0, 1, 2).asBlock(), new IntArrayVector(new int[] { 1, 2, 4, 3 }, 4, blockFactory).filter(0, 1, 3).asBlock(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().filter(0, 1, 2), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3), - IntBlock.newBlockBuilder(3).appendInt(1).appendNull().appendInt(2).appendInt(3).build().filter(0, 2, 3) + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().filter(0, 1, 2), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendNull().appendInt(2).appendInt(3).build().filter(0, 2, 3) ); assertAllEquals(blocks); @@ -141,11 +141,11 @@ public void testBlockEquality() { blockFactory.newIntArrayVector(new int[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), blockFactory.newIntArrayVector(new int[] 
{ 9, 9, 4 }, 3).filter(0, 1).asBlock(), blockFactory.newIntArrayVector(new int[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), - IntBlock.newConstantBlockWith(9, 2), - IntBlock.newBlockBuilder(2).appendInt(9).appendInt(9).build(), - IntBlock.newBlockBuilder(2).appendInt(9).appendInt(9).build().filter(0, 1), - IntBlock.newBlockBuilder(2).appendInt(9).appendInt(4).appendInt(9).build().filter(0, 2), - IntBlock.newBlockBuilder(2).appendInt(9).appendNull().appendInt(9).build().filter(0, 2) + blockFactory.newConstantIntBlockWith(9, 2), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendInt(9).build(), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendInt(9).build().filter(0, 1), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendInt(4).appendInt(9).build().filter(0, 2), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendNull().appendInt(9).build().filter(0, 2) ); assertAllEquals(moreBlocks); } @@ -158,10 +158,10 @@ public void testVectorInequality() { blockFactory.newIntArrayVector(new int[] { 1, 2 }, 2), blockFactory.newIntArrayVector(new int[] { 1, 2, 3 }, 3), blockFactory.newIntArrayVector(new int[] { 1, 2, 4 }, 3), - IntBlock.newConstantBlockWith(9, 2).asVector(), - IntBlock.newBlockBuilder(2).appendInt(1).appendInt(2).build().asVector().filter(1), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build().asVector(), - IntBlock.newBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build().asVector() + blockFactory.newConstantIntBlockWith(9, 2).asVector(), + blockFactory.newIntBlockBuilder(2).appendInt(1).appendInt(2).build().asVector().filter(1), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build().asVector(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build().asVector() ); assertAllNotEquals(notEqualVectors); } @@ -174,22 +174,22 @@ public void testBlockInequality() { blockFactory.newIntArrayVector(new int[] { 1, 2 }, 2).asBlock(), blockFactory.newIntArrayVector(new int[] { 1, 2, 3 }, 3).asBlock(), blockFactory.newIntArrayVector(new int[] { 1, 2, 4 }, 3).asBlock(), - IntBlock.newConstantBlockWith(9, 2), - IntBlock.newBlockBuilder(2).appendInt(1).appendInt(2).build().filter(1), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendNull().build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendInt(3).build(), - IntBlock.newBlockBuilder(3).appendInt(1).beginPositionEntry().appendInt(2).appendInt(3).build() + blockFactory.newConstantIntBlockWith(9, 2), + blockFactory.newIntBlockBuilder(2).appendInt(1).appendInt(2).build().filter(1), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendInt(3).build(), + blockFactory.newIntBlockBuilder(3).appendInt(1).beginPositionEntry().appendInt(2).appendInt(3).build() ); assertAllNotEquals(notEqualBlocks); } public void testSimpleBlockWithSingleNull() { List blocks = List.of( - IntBlock.newBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), - 
IntBlock.newBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build() + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); @@ -200,8 +200,8 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder1 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); @@ -218,8 +218,8 @@ public void testSimpleBlockWithManyNulls() { public void testSimpleBlockWithSingleMultiValue() { List blocks = List.of( - IntBlock.newBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build(), - IntBlock.newBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build() + blockFactory.newIntBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build(), + blockFactory.newIntBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build() ); assertEquals(1, blocks.get(0).getPositionCount()); assertEquals(2, blocks.get(0).getValueCount(0)); @@ -229,9 +229,9 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntBlock.Builder builder3 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder1 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder3 = blockFactory.newIntBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java index a989a5ddb1ea1..27a2f9702a0ae 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java @@ -14,17 +14,17 @@ public class LongBlockEqualityTests extends ComputeTestCase { - static final BlockFactory blockFactory = BlockFactory.getNonBreakingInstance(); + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); public void testEmptyVector() { // all these "empty" vectors should be equivalent List vectors = List.of( blockFactory.newLongArrayVector(new long[] {}, 0), blockFactory.newLongArrayVector(new long[] { 0 }, 0), - LongBlock.newConstantBlockWith(0, 0).asVector(), - LongBlock.newConstantBlockWith(0, 0).filter().asVector(), - LongBlock.newBlockBuilder(0).build().asVector(), - LongBlock.newBlockBuilder(0).appendLong(1).build().asVector().filter() + blockFactory.newConstantLongBlockWith(0, 0).asVector(), + blockFactory.newConstantLongBlockWith(0, 0).filter().asVector(), + blockFactory.newLongBlockBuilder(0).build().asVector(), + blockFactory.newLongBlockBuilder(0).appendLong(1).build().asVector().filter() ); assertAllEquals(vectors); } @@ -46,10 +46,10 @@ public void testEmptyBlock() { BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values()) ), - LongBlock.newConstantBlockWith(0, 0), - LongBlock.newBlockBuilder(0).build(), - LongBlock.newBlockBuilder(0).appendLong(1).build().filter(), - LongBlock.newBlockBuilder(0).appendNull().build().filter() + blockFactory.newConstantLongBlockWith(0, 0), + blockFactory.newLongBlockBuilder(0).build(), + blockFactory.newLongBlockBuilder(0).appendLong(1).build().filter(), + blockFactory.newLongBlockBuilder(0).appendNull().build().filter() ); assertAllEquals(blocks); } @@ -64,10 +64,10 @@ public void testVectorEquality() { blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), blockFactory.newLongArrayVector(new long[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), blockFactory.newLongArrayVector(new long[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector().filter(0, 1, 2), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3).asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().asVector().filter(0, 2, 3) + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector().filter(0, 1, 2), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3).asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().asVector().filter(0, 2, 3) ); assertAllEquals(vectors); @@ -80,11 +80,11 @@ public void testVectorEquality() { blockFactory.newLongArrayVector(new long[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), 
blockFactory.newLongArrayVector(new long[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), blockFactory.newLongArrayVector(new long[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), - LongBlock.newConstantBlockWith(1, 3).asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector().filter(0, 1, 2), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().filter(0, 2, 3).asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().asVector().filter(0, 2, 3) + blockFactory.newConstantLongBlockWith(1, 3).asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector().filter(0, 1, 2), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().filter(0, 2, 3).asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().asVector().filter(0, 2, 3) ); assertAllEquals(moreVectors); } @@ -111,10 +111,10 @@ public void testBlockEquality() { blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), blockFactory.newLongArrayVector(new long[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().filter(0, 1, 2), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3), - LongBlock.newBlockBuilder(3).appendLong(1).appendNull().appendLong(2).appendLong(3).build().filter(0, 2, 3) + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().filter(0, 1, 2), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendNull().appendLong(2).appendLong(3).build().filter(0, 2, 3) ); assertAllEquals(blocks); @@ -139,11 +139,11 @@ public void testBlockEquality() { blockFactory.newLongArrayVector(new long[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), blockFactory.newLongArrayVector(new long[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), blockFactory.newLongArrayVector(new long[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), - LongBlock.newConstantBlockWith(9, 2), - LongBlock.newBlockBuilder(2).appendLong(9).appendLong(9).build(), - LongBlock.newBlockBuilder(2).appendLong(9).appendLong(9).build().filter(0, 1), - LongBlock.newBlockBuilder(2).appendLong(9).appendLong(4).appendLong(9).build().filter(0, 2), - LongBlock.newBlockBuilder(2).appendLong(9).appendNull().appendLong(9).build().filter(0, 2) + blockFactory.newConstantLongBlockWith(9, 2), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendLong(9).build(), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendLong(9).build().filter(0, 1), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendLong(4).appendLong(9).build().filter(0, 2), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendNull().appendLong(9).build().filter(0, 2) ); assertAllEquals(moreBlocks); } @@ -156,10 +156,10 
@@ public void testVectorInequality() { blockFactory.newLongArrayVector(new long[] { 1, 2 }, 2), blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3), blockFactory.newLongArrayVector(new long[] { 1, 2, 4 }, 3), - LongBlock.newConstantBlockWith(9, 2).asVector(), - LongBlock.newBlockBuilder(2).appendLong(1).appendLong(2).build().asVector().filter(1), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build().asVector(), - LongBlock.newBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build().asVector() + blockFactory.newConstantLongBlockWith(9, 2).asVector(), + blockFactory.newLongBlockBuilder(2).appendLong(1).appendLong(2).build().asVector().filter(1), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build().asVector(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build().asVector() ); assertAllNotEquals(notEqualVectors); } @@ -172,22 +172,22 @@ public void testBlockInequality() { blockFactory.newLongArrayVector(new long[] { 1, 2 }, 2).asBlock(), blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3).asBlock(), blockFactory.newLongArrayVector(new long[] { 1, 2, 4 }, 3).asBlock(), - LongBlock.newConstantBlockWith(9, 2), - LongBlock.newBlockBuilder(2).appendLong(1).appendLong(2).build().filter(1), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendLong(3).build(), - LongBlock.newBlockBuilder(3).appendLong(1).beginPositionEntry().appendLong(2).appendLong(3).build() + blockFactory.newConstantLongBlockWith(9, 2), + blockFactory.newLongBlockBuilder(2).appendLong(1).appendLong(2).build().filter(1), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendLong(3).build(), + blockFactory.newLongBlockBuilder(3).appendLong(1).beginPositionEntry().appendLong(2).appendLong(3).build() ); assertAllNotEquals(notEqualBlocks); } public void testSimpleBlockWithSingleNull() { List blocks = List.of( - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build() + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); @@ -198,8 +198,8 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); - LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder1 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = blockFactory.newLongBlockBuilder(grow ? 
0 : positions); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); @@ -216,8 +216,8 @@ public void testSimpleBlockWithManyNulls() { public void testSimpleBlockWithSingleMultiValue() { List blocks = List.of( - LongBlock.newBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build(), - LongBlock.newBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build() + blockFactory.newLongBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build(), + blockFactory.newLongBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build() ); assertEquals(1, blocks.get(0).getPositionCount()); assertEquals(2, blocks.get(0).getValueCount(0)); @@ -227,9 +227,9 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); - LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); - LongBlock.Builder builder3 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder1 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder3 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java index f067999a04ff1..d3572377912ac 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java @@ -23,7 +23,7 @@ public class MultiValueBlockTests extends SerializationTestCase { public void testIntBlockTrivial1() { - var blockBuilder = IntBlock.newBlockBuilder(4); + var blockBuilder = blockFactory.newIntBlockBuilder(4); blockBuilder.appendInt(10); blockBuilder.beginPositionEntry(); blockBuilder.appendInt(21); @@ -54,10 +54,11 @@ public void testIntBlockTrivial1() { // cannot get a Vector view assertNull(block.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); + block.close(); } public void testIntBlockTrivial() { - var blockBuilder = IntBlock.newBlockBuilder(10); + var blockBuilder = blockFactory.newIntBlockBuilder(10); blockBuilder.appendInt(1); blockBuilder.beginPositionEntry(); blockBuilder.appendInt(21); @@ -79,57 +80,66 @@ public void testIntBlockTrivial() { assertThat(block.getInt(block.getFirstValueIndex(0)), is(1)); assertNull(block.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); + block.close(); } public void testEmpty() { for (int initialSize : new int[] { 0, 10, 100, randomInt(512) }) { - IntBlock intBlock = IntBlock.newBlockBuilder(initialSize).build(); + IntBlock intBlock = blockFactory.newIntBlockBuilder(initialSize).build(); assertThat(intBlock.getPositionCount(), is(0)); assertThat(intBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); + intBlock.close(); - LongBlock longBlock 
= LongBlock.newBlockBuilder(initialSize).build(); + LongBlock longBlock = blockFactory.newLongBlockBuilder(initialSize).build(); assertThat(longBlock.getPositionCount(), is(0)); assertThat(longBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, this::serializeDeserializeBlock, null, Releasable::close); + longBlock.close(); - DoubleBlock doubleBlock = DoubleBlock.newBlockBuilder(initialSize).build(); + DoubleBlock doubleBlock = blockFactory.newDoubleBlockBuilder(initialSize).build(); assertThat(doubleBlock.getPositionCount(), is(0)); assertThat(doubleBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, this::serializeDeserializeBlock, null, Releasable::close); + doubleBlock.close(); - BytesRefBlock bytesRefBlock = BytesRefBlock.newBlockBuilder(initialSize).build(); + BytesRefBlock bytesRefBlock = blockFactory.newBytesRefBlockBuilder(initialSize).build(); assertThat(bytesRefBlock.getPositionCount(), is(0)); assertThat(bytesRefBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, this::serializeDeserializeBlock, null, Releasable::close); + bytesRefBlock.close(); } } public void testNullOnly() throws IOException { for (int initialSize : new int[] { 0, 10, 100, randomInt(512) }) { - IntBlock intBlock = IntBlock.newBlockBuilder(initialSize).appendNull().build(); + IntBlock intBlock = blockFactory.newIntBlockBuilder(initialSize).appendNull().build(); assertThat(intBlock.getPositionCount(), is(1)); assertThat(intBlock.getValueCount(0), is(0)); assertNull(intBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); + intBlock.close(); - LongBlock longBlock = LongBlock.newBlockBuilder(initialSize).appendNull().build(); + LongBlock longBlock = blockFactory.newLongBlockBuilder(initialSize).appendNull().build(); assertThat(longBlock.getPositionCount(), is(1)); assertThat(longBlock.getValueCount(0), is(0)); assertNull(longBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, this::serializeDeserializeBlock, null, Releasable::close); + longBlock.close(); - DoubleBlock doubleBlock = DoubleBlock.newBlockBuilder(initialSize).appendNull().build(); + DoubleBlock doubleBlock = blockFactory.newDoubleBlockBuilder(initialSize).appendNull().build(); assertThat(doubleBlock.getPositionCount(), is(1)); assertThat(doubleBlock.getValueCount(0), is(0)); assertNull(doubleBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, this::serializeDeserializeBlock, null, Releasable::close); + doubleBlock.close(); - BytesRefBlock bytesRefBlock = BytesRefBlock.newBlockBuilder(initialSize).appendNull().build(); + BytesRefBlock bytesRefBlock = blockFactory.newBytesRefBlockBuilder(initialSize).appendNull().build(); assertThat(bytesRefBlock.getPositionCount(), is(1)); assertThat(bytesRefBlock.getValueCount(0), is(0)); assertNull(bytesRefBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, this::serializeDeserializeBlock, null, Releasable::close); + bytesRefBlock.close(); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index d9377a490368d..a2b074c1403a0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java
@@ -29,7 +29,7 @@ public abstract class TestBlockBuilder implements Block.Builder {
     public abstract TestBlockBuilder endPositionEntry();
 
     public static Block blockFromValues(List<List<Object>> blockValues, ElementType elementType) {
-        TestBlockBuilder builder = builderOf(elementType);
+        TestBlockBuilder builder = builderOf(TestBlockFactory.getNonBreakingInstance(), elementType);
         for (List<Object> rowValues : blockValues) {
             if (rowValues.isEmpty()) {
                 builder.appendNull();
@@ -47,7 +47,7 @@ public static Block blockFromValues(List<List<Object>> blockValues, ElementType
     // Builds a block of single values. Each value can be null or non-null.
     // Differs from blockFromValues, as it does not use begin/endPositionEntry
     public static Block blockFromSingleValues(List<Object> blockValues, ElementType elementType) {
-        TestBlockBuilder builder = builderOf(elementType);
+        TestBlockBuilder builder = builderOf(TestBlockFactory.getNonBreakingInstance(), elementType);
         for (Object rowValue : blockValues) {
             if (rowValue == null) {
                 builder.appendNull();
@@ -58,39 +58,23 @@ public static Block blockFromSingleValues(List<Object> blockValues, ElementType
         return builder.build();
     }
 
-    static TestBlockBuilder builderOf(ElementType type) {
+    static TestBlockBuilder builderOf(BlockFactory blockFactory, ElementType type) {
         return switch (type) {
-            case INT -> new TestIntBlockBuilder(0);
-            case LONG -> new TestLongBlockBuilder(0);
-            case DOUBLE -> new TestDoubleBlockBuilder(0);
-            case BYTES_REF -> new TestBytesRefBlockBuilder(0);
-            case BOOLEAN -> new TestBooleanBlockBuilder(0);
+            case INT -> new TestIntBlockBuilder(blockFactory, 0);
+            case LONG -> new TestLongBlockBuilder(blockFactory, 0);
+            case DOUBLE -> new TestDoubleBlockBuilder(blockFactory, 0);
+            case BYTES_REF -> new TestBytesRefBlockBuilder(blockFactory, 0);
+            case BOOLEAN -> new TestBooleanBlockBuilder(blockFactory, 0);
             default -> throw new AssertionError(type);
         };
     }
 
-    static TestBlockBuilder ofInt(int estimatedSize) {
-        return new TestIntBlockBuilder(estimatedSize);
-    }
-
-    static TestBlockBuilder ofLong(int estimatedSize) {
-        return new TestLongBlockBuilder(estimatedSize);
-    }
-
-    static TestBlockBuilder ofDouble(int estimatedSize) {
-        return new TestDoubleBlockBuilder(estimatedSize);
-    }
-
-    static TestBlockBuilder ofBytesRef(int estimatedSize) {
-        return new TestBytesRefBlockBuilder(estimatedSize);
-    }
-
     private static class TestIntBlockBuilder extends TestBlockBuilder {
 
         private final IntBlock.Builder builder;
 
-        TestIntBlockBuilder(int estimatedSize) {
-            builder = IntBlock.newBlockBuilder(estimatedSize);
+        TestIntBlockBuilder(BlockFactory blockFactory, int estimatedSize) {
+            builder = blockFactory.newIntBlockBuilder(estimatedSize);
         }
 
         @Override
@@ -150,8 +134,8 @@ private static class TestLongBlockBuilder extends TestBlockBuilder {
 
         private final LongBlock.Builder builder;
 
-        TestLongBlockBuilder(int estimatedSize) {
-            builder = LongBlock.newBlockBuilder(estimatedSize);
+        TestLongBlockBuilder(BlockFactory blockFactory, int estimatedSize) {
+            builder = blockFactory.newLongBlockBuilder(estimatedSize);
         }
 
         @Override
@@ -211,8 +195,8 @@ private static class TestDoubleBlockBuilder extends TestBlockBuilder {
 
         private final DoubleBlock.Builder builder;
 
-        TestDoubleBlockBuilder(int estimatedSize) {
-            builder = DoubleBlock.newBlockBuilder(estimatedSize);
+        TestDoubleBlockBuilder(BlockFactory blockFactory, int estimatedSize) {
+            builder = blockFactory.newDoubleBlockBuilder(estimatedSize);
         }
 
         @Override
@@ -272,8 +256,8 @@ private static class TestBytesRefBlockBuilder extends TestBlockBuilder {
 
         private final BytesRefBlock.Builder builder;
 
-        TestBytesRefBlockBuilder(int estimatedSize) {
-            builder = BytesRefBlock.newBlockBuilder(estimatedSize);
+        TestBytesRefBlockBuilder(BlockFactory blockFactory, int estimatedSize) {
+            builder = blockFactory.newBytesRefBlockBuilder(estimatedSize);
         }
 
         @Override
@@ -333,8 +317,8 @@ private static class TestBooleanBlockBuilder extends TestBlockBuilder {
 
         private final BooleanBlock.Builder builder;
 
-        TestBooleanBlockBuilder(int estimatedSize) {
-            builder = BooleanBlock.newBlockBuilder(estimatedSize);
+        TestBooleanBlockBuilder(BlockFactory blockFactory, int estimatedSize) {
+            builder = blockFactory.newBooleanBlockBuilder(estimatedSize);
        }
 
         @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java
new file mode 100644
index 0000000000000..5b7072ab6476d
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.data;
+
+import org.elasticsearch.common.breaker.NoopCircuitBreaker;
+import org.elasticsearch.common.util.BigArrays;
+
+public class TestBlockFactory {
+
+    private static final BlockFactory NON_BREAKING = BlockFactory.getInstance(
+        new NoopCircuitBreaker("test-noop"),
+        BigArrays.NON_RECYCLING_INSTANCE
+    );
+
+    /**
+     * Returns the Non-Breaking block factory.
+     */
+    public static BlockFactory getNonBreakingInstance() {
+        return NON_BREAKING;
+    }
+}
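The new TestBlockFactory gives every compute test one shared, non-breaking factory in place of the BlockFactory.getNonBreakingInstance() entry point the tests previously reached for. A minimal usage sketch (the builder chain and close call mirror patterns used elsewhere in this patch; the values are hypothetical):

    BlockFactory factory = TestBlockFactory.getNonBreakingInstance();
    IntBlock block = factory.newIntBlockBuilder(3).appendInt(1).appendNull().appendInt(3).build();
    // ... assertions against the block ...
    block.close(); // no breaker to trip here, but closing keeps tests tidy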
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java
index 04ccf47ea6122..096db174a2580 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java
@@ -114,11 +114,11 @@ public void testCranky() {
 
     private Vector.Builder vectorBuilder(int estimatedSize, BlockFactory blockFactory) {
         return switch (elementType) {
             case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException();
-            case BOOLEAN -> BooleanVector.newVectorBuilder(estimatedSize, blockFactory);
-            case BYTES_REF -> BytesRefVector.newVectorBuilder(estimatedSize, blockFactory);
-            case DOUBLE -> DoubleVector.newVectorBuilder(estimatedSize, blockFactory);
-            case INT -> IntVector.newVectorBuilder(estimatedSize, blockFactory);
-            case LONG -> LongVector.newVectorBuilder(estimatedSize, blockFactory);
+            case BOOLEAN -> blockFactory.newBooleanVectorBuilder(estimatedSize);
+            case BYTES_REF -> blockFactory.newBytesRefVectorBuilder(estimatedSize);
+            case DOUBLE -> blockFactory.newDoubleVectorBuilder(estimatedSize);
+            case INT -> blockFactory.newIntVectorBuilder(estimatedSize);
+            case LONG -> blockFactory.newLongVectorBuilder(estimatedSize);
         };
     }
 
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java
index 3c46fef7e5257..cdfc7611ec678 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java
@@ -116,10 +116,10 @@ public void testCranky() {
 
     private Vector.Builder vectorBuilder(int size, BlockFactory blockFactory) {
         return switch (elementType) {
             case NULL, BYTES_REF, DOC, UNKNOWN -> throw new UnsupportedOperationException();
-            case BOOLEAN -> BooleanVector.newVectorFixedBuilder(size, blockFactory);
-            case DOUBLE -> DoubleVector.newVectorFixedBuilder(size, blockFactory);
-            case INT -> IntVector.newVectorFixedBuilder(size, blockFactory);
-            case LONG -> LongVector.newVectorFixedBuilder(size, blockFactory);
+            case BOOLEAN -> blockFactory.newBooleanVectorFixedBuilder(size);
+            case DOUBLE -> blockFactory.newDoubleVectorFixedBuilder(size);
+            case INT -> blockFactory.newIntVectorFixedBuilder(size);
+            case LONG -> blockFactory.newLongVectorFixedBuilder(size);
         };
     }
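Both vector test classes above now obtain their builders from the factory as well; what they exercise is the difference between growable and fixed-size builders. A sketch of the two paths (method names as in the switches above; the blockFactory variable and values are assumed):

    // Growable: the size argument is only an estimate.
    LongVector grown = blockFactory.newLongVectorBuilder(3).appendLong(1).appendLong(2).appendLong(3).build();

    // Fixed: the size is exact, so the factory can account for the memory up front.
    // Note there is no fixed builder for variable-width BYTES_REF values.
    LongVector fixed = blockFactory.newLongVectorFixedBuilder(3).appendLong(1).appendLong(2).appendLong(3).build();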
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
index d6edc903607cc..9acf188a4010d 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
@@ -11,7 +11,6 @@
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.NoMergePolicy;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
@@ -27,15 +26,11 @@
 import org.elasticsearch.compute.operator.OperatorTestCase;
 import org.elasticsearch.compute.operator.TestResultPageSinkOperator;
 import org.elasticsearch.core.IOUtils;
-import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy;
-import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.indices.CrankyCircuitBreakerService;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.junit.After;
 
 import java.io.IOException;
-import java.io.UncheckedIOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CopyOnWriteArrayList;
@@ -44,7 +39,6 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 public class LuceneCountOperatorTests extends AnyOperatorTestCase {
@@ -89,10 +83,8 @@ private LuceneCountOperator.Factory simple(BigArrays bigArrays, DataPartitioning
             throw new RuntimeException(e);
         }
 
-        SearchContext ctx = mockSearchContext(reader);
-        SearchExecutionContext ectx = mock(SearchExecutionContext.class);
-        when(ctx.getSearchExecutionContext()).thenReturn(ectx);
-        when(ectx.getIndexReader()).thenReturn(reader);
+        SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader);
+        when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader);
         final Query query;
         if (enableShortcut && randomBoolean()) {
             query = new MatchAllDocsQuery();
@@ -185,25 +177,4 @@ private void testCount(Supplier contexts, int size, int limit) {
             assertThat(totalCount, equalTo((long) size));
         }
     }
-
-    /**
-     * Creates a mock search context with the given index reader.
-     * The returned mock search context can be used to test with {@link LuceneOperator}.
-     */
-    public static SearchContext mockSearchContext(IndexReader reader) {
-        try {
-            ContextIndexSearcher searcher = new ContextIndexSearcher(
-                reader,
-                IndexSearcher.getDefaultSimilarity(),
-                IndexSearcher.getDefaultQueryCache(),
-                TrivialQueryCachingPolicy.NEVER,
-                true
-            );
-            SearchContext searchContext = mock(SearchContext.class);
-            when(searchContext.searcher()).thenReturn(searcher);
-            return searchContext;
-        } catch (IOException e) {
-            throw new UncheckedIOException(e);
-        }
-    }
 }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java
index fad1f793122d8..eab3e855d01ab 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java
@@ -12,20 +12,39 @@
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
 import org.elasticsearch.test.ESTestCase;
 
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
 import static org.hamcrest.Matchers.equalTo;
 
 public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTestCase<LuceneSourceOperator.Status> {
     public static LuceneSourceOperator.Status simple() {
-        return new LuceneSourceOperator.Status(0, 0, 1, 5, 123, 99990, 8000);
+        return new LuceneSourceOperator.Status(2, Set.of("*:*"), new TreeSet<>(List.of("a:0", "a:1")), 0, 1, 5, 123, 99990, 8000);
     }
 
     public static String simpleToJson() {
         return """
-            {"processed_slices":0,"slice_index":0,"total_slices":1,"pages_emitted":5,"slice_min":123,"slice_max":99990,"current":8000}""";
+            {
+              "processed_slices" : 2,
+              "processed_queries" : [
+                "*:*"
+              ],
+              "processed_shards" : [
+                "a:0",
+                "a:1"
+              ],
+              "slice_index" : 0,
+              "total_slices" : 1,
+              "pages_emitted" : 5,
+              "slice_min" : 123,
+              "slice_max" : 99990,
+              "current" : 8000
+            }""";
     }
 
     public void testToXContent() {
-        assertThat(Strings.toString(simple()), equalTo(simpleToJson()));
+        assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson()));
     }
 
     @Override
@@ -37,6 +56,8 @@ protected Writeable.Reader<LuceneSourceOperator.Status> instanceReader() {
     public LuceneSourceOperator.Status createTestInstance() {
         return new LuceneSourceOperator.Status(
             randomNonNegativeInt(),
+            randomProcessedQueries(),
+            randomProcessedShards(),
             randomNonNegativeInt(),
             randomNonNegativeInt(),
             randomNonNegativeInt(),
@@ -46,26 +67,58 @@ public LuceneSourceOperator.Status createTestInstance() {
         );
     }
 
+    private static Set<String> randomProcessedQueries() {
+        int size = between(0, 10);
+        Set<String> set = new TreeSet<>();
+        while (set.size() < size) {
+            set.add(randomAlphaOfLength(5));
+        }
+        return set;
+    }
+
+    private static Set<String> randomProcessedShards() {
+        int size = between(0, 10);
+        Set<String> set = new TreeSet<>();
+        while (set.size() < size) {
+            set.add(randomAlphaOfLength(3) + ":" + between(0, 10));
+        }
+        return set;
+    }
+
     @Override
     protected LuceneSourceOperator.Status mutateInstance(LuceneSourceOperator.Status instance) {
         int processedSlices = instance.processedSlices();
+        Set<String> processedQueries = instance.processedQueries();
+        Set<String> processedShards = instance.processedShards();
         int sliceIndex = instance.sliceIndex();
         int totalSlices = instance.totalSlices();
         int pagesEmitted = instance.pagesEmitted();
         int sliceMin = instance.sliceMin();
         int sliceMax = instance.sliceMax();
         int current = instance.current();
-        switch (between(0, 6)) {
+        switch (between(0, 8)) {
             case 0 -> processedSlices = randomValueOtherThan(processedSlices, ESTestCase::randomNonNegativeInt);
-            case 1 -> sliceIndex = randomValueOtherThan(sliceIndex, ESTestCase::randomNonNegativeInt);
-            case 2 -> totalSlices = randomValueOtherThan(totalSlices, ESTestCase::randomNonNegativeInt);
-            case 3 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt);
-            case 4 -> sliceMin = randomValueOtherThan(sliceMin, ESTestCase::randomNonNegativeInt);
-            case 5 -> sliceMax = randomValueOtherThan(sliceMax, ESTestCase::randomNonNegativeInt);
-            case 6 -> current = randomValueOtherThan(current, ESTestCase::randomNonNegativeInt);
+            case 1 -> processedQueries = randomValueOtherThan(processedQueries, LuceneSourceOperatorStatusTests::randomProcessedQueries);
+            case 2 -> processedShards = randomValueOtherThan(processedShards, LuceneSourceOperatorStatusTests::randomProcessedShards);
+            case 3 -> sliceIndex = randomValueOtherThan(sliceIndex, ESTestCase::randomNonNegativeInt);
+            case 4 -> totalSlices = randomValueOtherThan(totalSlices, ESTestCase::randomNonNegativeInt);
+            case 5 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt);
+            case 6 -> sliceMin = randomValueOtherThan(sliceMin, ESTestCase::randomNonNegativeInt);
+            case 7 -> sliceMax = randomValueOtherThan(sliceMax, ESTestCase::randomNonNegativeInt);
+            case 8 -> current = randomValueOtherThan(current, ESTestCase::randomNonNegativeInt);
             default -> throw new UnsupportedOperationException();
         }
         ;
-        return new LuceneSourceOperator.Status(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current);
+        return new LuceneSourceOperator.Status(
+            processedSlices,
+            processedQueries,
+            processedShards,
+            sliceIndex,
+            totalSlices,
+            pagesEmitted,
+            sliceMin,
+            sliceMax,
+            current
+        );
     }
 }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
index 41fe1a93d9c8b..74e9d7b122718 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java
@@ -27,6 +27,7 @@
 import org.elasticsearch.compute.operator.OperatorTestCase;
 import org.elasticsearch.compute.operator.TestResultPageSinkOperator;
 import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy;
 import org.elasticsearch.index.fielddata.FieldDataContext;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -96,23 +97,21 @@ private LuceneSourceOperator.Factory simple(BigArrays bigArrays, DataPartitionin
         }
 
         SearchContext ctx = mockSearchContext(reader);
-        SearchExecutionContext ectx = mock(SearchExecutionContext.class);
-        when(ctx.getSearchExecutionContext()).thenReturn(ectx);
-        when(ectx.getFieldType(anyString())).thenAnswer(inv -> {
+        when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> {
             String name = inv.getArgument(0);
             return switch (name) {
                 case "s" -> S_FIELD;
                 default -> throw new IllegalArgumentException("don't support [" + name + "]");
             };
         });
-        when(ectx.getForField(any(), any())).thenAnswer(inv -> {
+        when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> {
             MappedFieldType ft = inv.getArgument(0);
             IndexFieldData.Builder builder = 
ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); return builder.build(new IndexFieldDataCache.None(), bigArrays.breakerService()); }); - when(ectx.nestedScope()).thenReturn(new NestedScope()); - when(ectx.nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ectx.getIndexReader()).thenReturn(reader); + when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); + when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); + when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); Function queryFunction = c -> new MatchAllDocsQuery(); int maxPageSize = between(10, Math.max(10, numDocs)); return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit); @@ -216,6 +215,10 @@ public static SearchContext mockSearchContext(IndexReader reader) { ); SearchContext searchContext = mock(SearchContext.class); when(searchContext.searcher()).thenReturn(searcher); + SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); + when(searchContext.getSearchExecutionContext()).thenReturn(searchExecutionContext); + when(searchExecutionContext.getFullyQualifiedIndex()).thenReturn(new Index("test", "uid")); + when(searchExecutionContext.getShardId()).thenReturn(0); return searchContext; } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index d1b9e706750df..445e3e0f80264 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.search.internal.SearchContext; @@ -49,7 +48,6 @@ import static org.hamcrest.Matchers.hasSize; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { @@ -90,23 +88,21 @@ private LuceneTopNSourceOperator.Factory simple(BigArrays bigArrays, DataPartiti } SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader); - SearchExecutionContext ectx = mock(SearchExecutionContext.class); - when(ctx.getSearchExecutionContext()).thenReturn(ectx); - when(ectx.getFieldType(anyString())).thenAnswer(inv -> { + when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> { String name = inv.getArgument(0); return switch (name) { case "s" -> S_FIELD; default -> throw new IllegalArgumentException("don't support [" + name + "]"); }; }); - when(ectx.getForField(any(), any())).thenAnswer(inv -> { + when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> { MappedFieldType ft = inv.getArgument(0); IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); return 
builder.build(new IndexFieldDataCache.None(), bigArrays.breakerService()); }); - when(ectx.nestedScope()).thenReturn(new NestedScope()); - when(ectx.nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ectx.getIndexReader()).thenReturn(reader); + when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); + when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); + when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); Function queryFunction = c -> new MatchAllDocsQuery(); int taskConcurrency = 0; int maxPageSize = between(10, Math.max(10, size)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java index 6f0317b509e3b..1851f7ac948cc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java @@ -24,11 +24,16 @@ public static ValuesSourceReaderOperator.Status simple() { public static String simpleToJson() { return """ - {"readers_built":{"ReaderType":3},"pages_processed":123}"""; + { + "readers_built" : { + "ReaderType" : 3 + }, + "pages_processed" : 123 + }"""; } public void testToXContent() { - assertThat(Strings.toString(simple()), equalTo(simpleToJson())); + assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson())); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 2133395ef532b..ed99245053232 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -272,9 +272,9 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - assumeTrue("doesn't use big arrays so can't break", false); - return null; + protected ByteSizeValue memoryLimitForSimple() { + assumeFalse("strange exception in the test, fix soon", true); + return ByteSizeValue.ofKb(1); } public void testLoadAll() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java index 784d5134e9608..58169cb4cfda6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java @@ -83,8 +83,7 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - assumeTrue("doesn't use big array so never breaks", false); - return null; + protected ByteSizeValue memoryLimitForSimple() { + return ByteSizeValue.ofBytes(50); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index 47febc09e45f5..01f51b32edb1d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -9,7 +9,9 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -47,7 +49,7 @@ public static Page mergePages(List pages) { Block.Builder[] builders = new Block.Builder[first.getBlockCount()]; try { for (int b = 0; b < builders.length; b++) { - builders[b] = first.getBlock(b).elementType().newBlockBuilder(totalPositions); + builders[b] = first.getBlock(b).elementType().newBlockBuilder(totalPositions, TestBlockFactory.getNonBreakingInstance()); } for (Page p : pages) { for (int b = 0; b < builders.length; b++) { @@ -79,11 +81,12 @@ public static Page mergePages(List pages) { */ public static List deepCopyOf(List pages) { List out = new ArrayList<>(pages.size()); + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); for (Page p : pages) { Block[] blocks = new Block[p.getBlockCount()]; for (int b = 0; b < blocks.length; b++) { Block orig = p.getBlock(b); - Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount()); + Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory); builder.copyFrom(orig, 0, p.getPositionCount()); blocks[b] = builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index 485610f5842bb..f28a982824afa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -58,7 +58,7 @@ public Block eval(Page page) { BytesRefBlock input = page.getBlock(0); for (int i = 0; i < input.getPositionCount(); i++) { if (input.getBytesRef(i, new BytesRef()).utf8ToString().startsWith("no_")) { - return Block.constantNullBlock(input.getPositionCount(), input.blockFactory()); + return input.blockFactory().newConstantNullBlock(input.getPositionCount()); } } input.incRef(); @@ -97,15 +97,15 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - return ByteSizeValue.ofBytes(between(1, 32)); + protected ByteSizeValue memoryLimitForSimple() { + return ByteSizeValue.ofKb(15); } public void testAllNullValues() { DriverContext driverContext = driverContext(); BytesRef scratch = new BytesRef(); - Block input1 = BytesRefBlock.newBlockBuilder(1, driverContext.blockFactory()).appendBytesRef(new BytesRef("can_match")).build(); - Block input2 = BytesRefBlock.newBlockBuilder(1, driverContext.blockFactory()).appendBytesRef(new BytesRef("no_match")).build(); + Block input1 = driverContext.blockFactory().newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("can_match")).build(); + Block input2 = 
driverContext.blockFactory().newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("no_match")).build(); List inputPages = List.of(new Page(input1), new Page(input2)); List outputPages = drive(simple(driverContext.bigArrays()).get(driverContext), inputPages.iterator(), driverContext); BytesRefBlock output1 = outputPages.get(0).getBlock(1); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java index 37df6adad9e12..ce62fb9896eba 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java @@ -44,8 +44,18 @@ protected final BigArrays nonBreakingBigArrays() { return new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofBytes(Integer.MAX_VALUE)).withCircuitBreaking(); } - protected BlockFactory blockFactory() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + /** + * Build a {@link BlockFactory} with a huge limit. + */ + protected final BlockFactory blockFactory() { + return blockFactory(ByteSizeValue.ofGb(1)); + } + + /** + * Build a {@link BlockFactory} with a configured limit. + */ + protected final BlockFactory blockFactory(ByteSizeValue limit) { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, limit).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); BlockFactory factory = new MockBlockFactory(breaker, bigArrays); @@ -53,7 +63,10 @@ protected BlockFactory blockFactory() { return factory; } - protected BlockFactory crankyBlockFactory() { + /** + * Build a {@link BlockFactory} that randomly fails. 
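Editor's note (the javadoc and the method it describes continue right after this note): the cranky factory exists so tests can prove operators survive random allocation failure without leaking. A minimal sketch of the pattern it enables, assuming only APIs visible in this diff (`BlockFactory#newLongBlockBuilder`, releasable builders, and `CircuitBreakingException`); the `CrankyUsageSketch` class and `tryBuild` harness are hypothetical, not code from this PR:

```java
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.LongBlock;

class CrankyUsageSketch {
    /**
     * Run a block-building routine against a factory whose breaker trips at
     * random. Both outcomes are acceptable; the allBreakersEmpty() hook in
     * ComputeTestCase then proves a failed allocation leaked nothing.
     */
    static void tryBuild(BlockFactory crankyFactory) {
        try (LongBlock.Builder builder = crankyFactory.newLongBlockBuilder(100)) {
            for (long v = 0; v < 100; v++) {
                builder.appendLong(v);
            }
            builder.build().close(); // release the block we just built
        } catch (CircuitBreakingException e) {
            // Expected some of the time: the cranky breaker tripped, and
            // try-with-resources already released the partially-built state.
        }
    }
}
```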
+ */ + protected final BlockFactory crankyBlockFactory() { CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); @@ -64,7 +77,7 @@ protected BlockFactory crankyBlockFactory() { } @After - public void allBreakersEmpty() throws Exception { + public final void allBreakersEmpty() throws Exception { // first check that all big arrays are released, which can affect breakers MockBigArrays.ensureAllArraysAreReleased(); for (var factory : blockFactories) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java index 27076c2adf2d2..a3af5aafcbee3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -167,7 +167,7 @@ static class AssertingDriverContext extends DriverContext { AssertingDriverContext() { super( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()), - BlockFactory.getNonBreakingInstance() + TestBlockFactory.getNonBreakingInstance() ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java index f6b4fbc817940..ec9952cdce022 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java @@ -30,19 +30,23 @@ public void testToXContent() { new DriverStatus.OperatorStatus("ValuesSourceReader", ValuesSourceReaderOperatorStatusTests.simple()) ) ); - assertThat( - Strings.toString(status), - equalTo( - """ - {"operators":[""" - + """ - {"operator":"LuceneSource","status":""" - + LuceneSourceOperatorStatusTests.simpleToJson() - + "},{\"operator\":\"ValuesSourceReader\",\"status\":" - + ValuesSourceReaderOperatorStatusTests.simpleToJson() - + "}]}" - ) - ); + assertThat(Strings.toString(status, true, true), equalTo(""" + { + "operators" : [ + { + "operator" : "LuceneSource", + "status" : + """.stripTrailing() + " " + LuceneSourceOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + }, + { + "operator" : "ValuesSourceReader", + "status" : + """.stripTrailing() + " " + ValuesSourceReaderOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + } + ] + }""")); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java index cdae4283540c4..c10bcf8d49ca4 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java @@ -39,15 +39,34 @@ public void testToXContent() { ), List.of(new DriverStatus.OperatorStatus("ExchangeSink", ExchangeSinkOperatorStatusTests.simple())) ); - assertThat(Strings.toString(status), equalTo(""" - {"sessionId":"ABC:123","last_updated":"1973-11-29T09:27:23.214Z","status":"running", - """.trim() + """ - "completed_operators":[{"operator":"LuceneSource","status": - """.trim() + LuceneSourceOperatorStatusTests.simpleToJson() + """ - },{"operator":"ValuesSourceReader","status": - """.trim() + ValuesSourceReaderOperatorStatusTests.simpleToJson() + """ - }],"active_operators":[{"operator":"ExchangeSink","status": - """.trim() + ExchangeSinkOperatorStatusTests.simpleToJson() + "}]}")); + assertThat(Strings.toString(status, true, true), equalTo(""" + { + "sessionId" : "ABC:123", + "last_updated" : "1973-11-29T09:27:23.214Z", + "status" : "running", + "completed_operators" : [ + { + "operator" : "LuceneSource", + "status" : + """.trim() + " " + LuceneSourceOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + }, + { + "operator" : "ValuesSourceReader", + "status" : + """.stripTrailing() + " " + ValuesSourceReaderOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + } + ], + "active_operators" : [ + { + "operator" : "ExchangeSink", + "status" : + """.stripTrailing() + " " + ExchangeSinkOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + } + ] + }""")); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index c755c5eafe08d..29e4404d43482 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -37,7 +37,7 @@ record Addition(DriverContext driverContext, int lhs, int rhs) implements EvalOp public Block eval(Page page) { LongVector lhsVector = page.getBlock(0).asVector(); LongVector rhsVector = page.getBlock(1).asVector(); - try (LongVector.FixedBuilder result = LongVector.newVectorFixedBuilder(page.getPositionCount(), driverContext.blockFactory())) { + try (LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(page.getPositionCount())) { for (int p = 0; p < page.getPositionCount(); p++) { result.appendLong(lhsVector.getLong(p) + rhsVector.getLong(p)); } @@ -117,7 +117,7 @@ public void testReadFromBlock() { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - return ByteSizeValue.ofBytes(between(1, 8000)); + protected ByteSizeValue memoryLimitForSimple() { + return ByteSizeValue.ofKb(4); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index d067435ba9aaa..3c09d0100b366 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -36,7 +36,7 @@ record SameLastDigit(DriverContext context, int lhs, int rhs) implements EvalOpe public Block 
eval(Page page) { LongVector lhsVector = page.getBlock(0).asVector(); LongVector rhsVector = page.getBlock(1).asVector(); - BooleanVector.FixedBuilder result = BooleanVector.newVectorFixedBuilder(page.getPositionCount(), context.blockFactory()); + BooleanVector.FixedBuilder result = context.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount()); for (int p = 0; p < page.getPositionCount(); p++) { result.appendBoolean(lhsVector.getLong(p) % 10 == rhsVector.getLong(p) % 10); } @@ -116,7 +116,7 @@ public void testReadFromBlock() { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - return ByteSizeValue.ofBytes(between(1, 600)); + protected ByteSizeValue memoryLimitForSimple() { + return ByteSizeValue.ofKb(1); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 9403d22f2b4c4..3986c4b337e03 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; @@ -61,7 +61,7 @@ public final void testInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = new ArrayList<>(); try ( Driver d = new Driver( @@ -85,7 +85,7 @@ public final void testManyInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); List results = new ArrayList<>(); try ( @@ -107,7 +107,7 @@ public final void testInitialIntermediateFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = new ArrayList<>(); try ( @@ -133,7 +133,7 @@ public final void 
testManyInitialManyPartialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); Collections.shuffle(partials, random()); @@ -163,7 +163,7 @@ public final void testManyInitialManyPartialFinal() { public final void testManyInitialManyPartialFinalRunner() { BigArrays bigArrays = nonBreakingBigArrays(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, false /* no throwing ops */); var runner = new DriverRunner(threadPool.getThreadContext()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index b1ef784ca339c..120f6e2b6e6bd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -95,8 +95,7 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - return ByteSizeValue.ofBytes(between(1, 32)); + protected ByteSizeValue memoryLimitForSimple() { + return ByteSizeValue.ofKb(1); } - } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index 8c85f5927196f..fdceb6ead36dd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -51,8 +51,8 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - assumeFalse("doesn't use big arrays", true); + protected ByteSizeValue memoryLimitForSimple() { + assumeFalse("doesn't allocate, just filters", true); return null; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java index 1989d27351aa3..2670d739b1edc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import 
org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; @@ -396,7 +397,7 @@ private int assertEncodedPosition(BasicBlockTests.RandomBlock b, BatchEncoder en * This produces a block with a single value per position, but it's good enough * for comparison. */ - Block.Builder builder = elementType.newBlockBuilder(encoder.valueCount(offset)); + Block.Builder builder = elementType.newBlockBuilder(encoder.valueCount(offset), TestBlockFactory.getNonBreakingInstance()); BytesRef[] toDecode = new BytesRef[encoder.valueCount(offset)]; for (int i = 0; i < toDecode.length; i++) { BytesRefBuilder dest = new BytesRefBuilder(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 3572dc620287d..ff84f1fc7ae3e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -12,9 +12,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import java.util.Iterator; import java.util.List; @@ -202,16 +201,17 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - assumeTrue("doesn't use big arrays so can't break", false); - return null; + protected ByteSizeValue memoryLimitForSimple() { + assumeFalse("doesn't throw in tests but probably should", true); + return ByteSizeValue.ofBytes(1); } public void testNoopStatus() { + BlockFactory blockFactory = blockFactory(); MvExpandOperator op = new MvExpandOperator(0, randomIntBetween(1, 1000)); List result = drive( op, - List.of(new Page(IntVector.newVectorBuilder(2).appendInt(1).appendInt(2).build().asBlock())).iterator(), + List.of(new Page(blockFactory.newIntVectorBuilder(2).appendInt(1).appendInt(2).build().asBlock())).iterator(), driverContext() ); assertThat(result, hasSize(1)); @@ -224,7 +224,8 @@ public void testNoopStatus() { public void testExpandStatus() { MvExpandOperator op = new MvExpandOperator(0, randomIntBetween(1, 1)); - var builder = IntBlock.newBlockBuilder(2).beginPositionEntry().appendInt(1).appendInt(2).endPositionEntry(); + BlockFactory blockFactory = blockFactory(); + var builder = blockFactory.newIntBlockBuilder(2).beginPositionEntry().appendInt(1).appendInt(2).endPositionEntry(); List result = drive(op, List.of(new Page(builder.build())).iterator(), driverContext()); assertThat(result, hasSize(1)); assertThat(valuesAtPositions(result.get(0).getBlock(0), 0, 2), equalTo(List.of(List.of(1), List.of(2)))); @@ -232,6 +233,7 @@ public void testExpandStatus() { assertThat(status.pagesIn(), equalTo(1)); assertThat(status.pagesOut(), equalTo(1)); assertThat(status.noops(), equalTo(0)); + result.forEach(Page::releaseBlocks); } public void testExpandWithBytesRefs() { @@ -253,7 +255,7 @@ protected Page createPage(int positionOffset, int length) { ); } }); - List origInput = deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput 
= deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive(new MvExpandOperator(0, randomIntBetween(1, 1000)), input.iterator(), context); assertSimpleOutput(origInput, results); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 2f1cc2981766e..de2c94f9f0a3a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; @@ -56,13 +57,6 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { */ protected abstract void assertSimpleOutput(List input, List results); - /** - * A {@link ByteSizeValue} that is so small any input to the operator - * will cause it to circuit break. If the operator can't break then - * throw an {@link AssumptionViolatedException}. - */ - protected abstract ByteSizeValue smallEnoughToCircuitBreak(); - /** * Test a small input set against {@link #simple}. Smaller input sets * are more likely to discover accidental behavior for clumped inputs. @@ -79,27 +73,55 @@ public final void testSimpleLargeInput() { } /** - * Run {@link #simple} with a circuit breaker configured by - * {@link #smallEnoughToCircuitBreak} and assert that it breaks - * in a sane way. + * A {@link ByteSizeValue} that is small enough that running {@link #simple} + * on {@link #simpleInput} will exhaust the breaker and throw a + * {@link CircuitBreakingException}. We should make an effort to make this + * number as large as possible and still cause a break consistently so we get + * good test coverage. If the operator can't break then throw an + * {@link AssumptionViolatedException}. + */ + protected abstract ByteSizeValue memoryLimitForSimple(); + + /** + * Run {@link #simple} with a circuit breaker limited to somewhere + * between 0 bytes and {@link #memoryLimitForSimple} and assert that + * it breaks in a sane way. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101824") public final void testSimpleCircuitBreaking() { + testSimpleCircuitBreaking(ByteSizeValue.ofBytes(randomLongBetween(0, memoryLimitForSimple().getBytes()))); + } + + /** + * Run {@link #simple} with a circuit breaker configured limited to + * {@link #memoryLimitForSimple} and assert that it breaks in a sane way. + *
+ * This test helps to make sure that the limits set by + * {@link #memoryLimitForSimple} aren't too large. + * {@link #testSimpleCircuitBreaking}, with its randomly configured + * limit, will use the actual maximum very rarely. + *
+ */ + public final void testSimpleCircuitBreakingAtLimit() { + testSimpleCircuitBreaking(memoryLimitForSimple()); + } + + private void testSimpleCircuitBreaking(ByteSizeValue limit) { /* * We build two CircuitBreakers - one for the input blocks and one for the operation itself. * The input blocks don't count against the memory usage for the limited operator that we * build. */ DriverContext inputFactoryContext = driverContext(); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()) - .withCircuitBreaking(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, limit).withCircuitBreaking(); + Operator.OperatorFactory simple = simple(bigArrays); + logger.info("running {} with {}", simple, bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST)); List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); DriverContext driverContext = new DriverContext(bigArrays, blockFactory); boolean[] driverStarted = new boolean[1]; Exception e = expectThrows(CircuitBreakingException.class, () -> { - var operator = simple(bigArrays).get(driverContext); + var operator = simple.get(driverContext); driverStarted[0] = true; drive(operator, input.iterator(), driverContext); }); @@ -186,7 +208,7 @@ protected final void assertSimple(DriverContext context, int size) { } // Clone the input so that the operator can close it, then, later, we can read it again to build the assertion. - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); BigArrays bigArrays = context.bigArrays().withCircuitBreaking(); List results = drive(simple(bigArrays).get(context), input.iterator(), context); @@ -270,10 +292,10 @@ public static void runDriver(List drivers) { drivers.add( new Driver( "dummy-session", - new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance()), + new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, TestBlockFactory.getNonBreakingInstance()), () -> "dummy-driver", new SequenceLongBlockSourceOperator( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), LongStream.range(0, between(1, 100)), between(1, 100) ), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index 30f3bfda27d5e..15166ac525435 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -97,8 +97,8 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - assumeTrue("doesn't use big arrays so can't break", false); + protected ByteSizeValue memoryLimitForSimple() { + assumeTrue("doesn't allocate", false); return null; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java index 
c8250eba5703a..cd8a49939fbb5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java @@ -11,12 +11,12 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -28,7 +28,7 @@ public class RowOperatorTests extends ESTestCase { final DriverContext driverContext = new DriverContext( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getNonBreakingInstance() + TestBlockFactory.getNonBreakingInstance() ); public void testBoolean() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java index 7c1c62aea6ab9..b92c6d01e5077 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java @@ -37,12 +37,13 @@ public SequenceBooleanBlockSourceOperator(BlockFactory blockFactory, List @Override protected Page createPage(int positionOffset, int length) { - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(length, blockFactory); + DoubleVector.FixedBuilder builder = blockFactory.newDoubleVectorFixedBuilder(length); for (int i = 0; i < length; i++) { builder.appendDouble(values[positionOffset + i]); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index 70ef2118fcef0..a90d6e71633e6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -85,8 +85,8 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { - return ByteSizeValue.ofBytes(between(1, 32)); + protected ByteSizeValue memoryLimitForSimple() { + return ByteSizeValue.ofKb(15); } public void testMultivalueDissectInput() { @@ -103,8 +103,9 @@ public Block eval(Page page) { public void close() {} }, new FirstWord("test"), driverContext()); - Page result = null; - try (BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(1)) { + BlockFactory blockFactory = blockFactory(); + final Page result; + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(1)) { builder.beginPositionEntry(); builder.appendBytesRef(new BytesRef("foo1 bar1")); builder.appendBytesRef(new BytesRef("foo2 bar2")); diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java index aaa3a6ac8a3c8..e2cb0e21938e2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java @@ -7,9 +7,9 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import java.util.function.Consumer; @@ -21,7 +21,7 @@ public class TestResultPageSinkOperator extends PageConsumerOperator { public TestResultPageSinkOperator(Consumer pageConsumer) { super(page -> { - Page copy = BlockTestUtils.deepCopyOf(page, BlockFactory.getNonBreakingInstance()); + Page copy = BlockTestUtils.deepCopyOf(page, TestBlockFactory.getNonBreakingInstance()); page.releaseBlocks(); pageConsumer.accept(copy); }); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 1f540d2a22cb8..8be6cdebad538 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -40,6 +41,7 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.transport.AbstractSimpleTransportTestCase; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; @@ -183,14 +185,15 @@ public Page getOutput() { return null; } int size = randomIntBetween(1, 10); - IntBlock.Builder builder = IntBlock.newBlockBuilder(size); - for (int i = 0; i < size; i++) { - int seqNo = nextSeqNo.incrementAndGet(); - if (seqNo < maxInputSeqNo) { - builder.appendInt(seqNo); + try (IntBlock.Builder builder = driverContext.blockFactory().newIntBlockBuilder(size)) { + for (int i = 0; i < size; i++) { + int seqNo = nextSeqNo.incrementAndGet(); + if (seqNo < maxInputSeqNo) { + builder.appendInt(seqNo); + } } + return new Page(builder.build()); } - return new Page(builder.build()); } @Override @@ -372,9 +375,10 @@ public void testConcurrentWithTransportActions() throws Exception { try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - ExchangeSourceHandler sourceHandler = exchange0.createSourceHandler(exchangeId, randomExchangeBuffer(), ESQL_TEST_EXECUTOR); + var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), 
threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); - sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, node1.getLocalNode()), randomIntBetween(1, 5)); + Transport.Connection connection = node0.getConnection(node1.getLocalNode()); + sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink); @@ -414,8 +418,8 @@ public void sendResponse(TransportResponse transportResponse) throws IOException } } } - ExchangeResponse newResp = new ExchangeResponse(page, origResp.finished()); origResp.decRef(); + ExchangeResponse newResp = new ExchangeResponse(page, origResp.finished()); super.sendResponse(newResp); } }; @@ -425,9 +429,10 @@ public void sendResponse(TransportResponse transportResponse) throws IOException try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - ExchangeSourceHandler sourceHandler = exchange0.createSourceHandler(exchangeId, randomIntBetween(1, 128), ESQL_TEST_EXECUTOR); + var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); - sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, node1.getLocalNode()), randomIntBetween(1, 5)); + Transport.Connection connection = node0.getConnection(node1.getLocalDiscoNode()); + sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); Exception err = expectThrows( Exception.class, () -> runConcurrentTest(maxSeqNo, maxSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink) @@ -435,6 +440,7 @@ public void sendResponse(TransportResponse transportResponse) throws IOException Throwable cause = ExceptionsHelper.unwrap(err, IOException.class); assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); + sinkHandler.onFailure(new RuntimeException(cause)); } } @@ -499,11 +505,18 @@ private BlockFactory blockFactory() { MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new BlockFactory(breaker, bigArrays); + MockBlockFactory factory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(factory); + return factory; } + private final List blockFactories = new ArrayList<>(); + @After public void allMemoryReleased() { + for (MockBlockFactory blockFactory : blockFactories) { + blockFactory.ensureAllBlocksAreReleased(); + } for (CircuitBreaker breaker : breakers) { assertThat(breaker.getUsed(), equalTo(0L)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java index 7438055284b14..369913c7d152c 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java @@ -17,7 +17,7 @@ public class ExchangeSinkOperatorStatusTests extends AbstractWireSerializingTestCase { public void testToXContent() { - assertThat(Strings.toString(simple()), equalTo(simpleToJson())); + assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson())); } public static ExchangeSinkOperator.Status simple() { @@ -26,7 +26,9 @@ public static ExchangeSinkOperator.Status simple() { public static String simpleToJson() { return """ - {"pages_accepted":10}"""; + { + "pages_accepted" : 10 + }"""; } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index e3ae5e29f472c..a97294beac542 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.test.ESTestCase; @@ -34,6 +34,7 @@ public class ExtractorTests extends ESTestCase { @ParametersFactory public static Iterable parameters() { + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); List cases = new ArrayList<>(); for (ElementType e : ElementType.values()) { switch (e) { @@ -83,9 +84,9 @@ public static Iterable parameters() { e, TopNEncoder.DEFAULT_UNSORTABLE, () -> new DocVector( - IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), - IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), - IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), + blockFactory.newConstantIntBlockWith(randomInt(), 1).asVector(), + blockFactory.newConstantIntBlockWith(randomInt(), 1).asVector(), + blockFactory.newConstantIntBlockWith(randomInt(), 1).asVector(), randomBoolean() ? 
null : randomBoolean() ).asBlock() ) } @@ -113,7 +114,7 @@ static Object[] valueTestCase(String name, ElementType type, TopNEncoder encoder name, type, encoder, - () -> BlockUtils.fromListRow(BlockFactory.getNonBreakingInstance(), Arrays.asList(value.get()))[0] + () -> BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), Arrays.asList(value.get()))[0] ) }; } @@ -154,7 +155,7 @@ public void testNotInKey() { assertThat(valuesBuilder.length(), greaterThan(0)); ResultBuilder result = ResultBuilder.resultBuilderFor( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), testCase.type, testCase.encoder.toUnsortable(), false, @@ -181,7 +182,7 @@ public void testInKey() { assertThat(valuesBuilder.length(), greaterThan(0)); ResultBuilder result = ResultBuilder.resultBuilderFor( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), testCase.type, testCase.encoder.toUnsortable(), true, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 0219f9a404d77..6ed6e7d9b8e29 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -18,14 +18,12 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockBuilder; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -181,12 +179,12 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue smallEnoughToCircuitBreak() { + protected ByteSizeValue memoryLimitForSimple() { /* * 775 causes us to blow up while collecting values and 780 doesn't - * trip the breaker. So 775 is the max on this range. + * trip the breaker. 
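Editor's note (the comment and override continue right below): thresholds like 775 are found empirically. A hedged sketch of one way to locate such a cutoff, assuming breaking is roughly monotonic in the limit; `breaksWithLimit` is a hypothetical probe that runs the operator under a breaker of the given size and reports whether it threw `CircuitBreakingException`:

```java
import java.util.function.LongPredicate;

class LimitProbe {
    /** Largest limit in [lo, hi] that still trips the breaker. */
    static long largestBreakingLimit(long lo, long hi, LongPredicate breaksWithLimit) {
        while (lo < hi) {
            long mid = (lo + hi + 1) >>> 1; // round up so the loop always makes progress
            if (breaksWithLimit.test(mid)) {
                lo = mid;     // mid still breaks; the answer is at least mid
            } else {
                hi = mid - 1; // mid survives; the answer is below mid
            }
        }
        return lo;
    }
}
```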
*/ - return ByteSizeValue.ofBytes(between(1, 775)); + return ByteSizeValue.ofBytes(775); } public void testRamBytesUsed() { @@ -305,14 +303,14 @@ private List topNLong(List inputValues, int limit, boolean ascending } public void testCompareInts() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - IntBlock.newBlockBuilder(2).appendInt(Integer.MIN_VALUE).appendInt(randomIntBetween(-1000, -1)).build(), - IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(-1000, -1)).appendInt(0).build(), - IntBlock.newBlockBuilder(2).appendInt(0).appendInt(randomIntBetween(1, 1000)).build(), - IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(1, 1000)).appendInt(Integer.MAX_VALUE).build(), - IntBlock.newBlockBuilder(2).appendInt(0).appendInt(Integer.MAX_VALUE).build() } + blockFactory.newIntBlockBuilder(2).appendInt(Integer.MIN_VALUE).appendInt(randomIntBetween(-1000, -1)).build(), + blockFactory.newIntBlockBuilder(2).appendInt(randomIntBetween(-1000, -1)).appendInt(0).build(), + blockFactory.newIntBlockBuilder(2).appendInt(0).appendInt(randomIntBetween(1, 1000)).build(), + blockFactory.newIntBlockBuilder(2).appendInt(randomIntBetween(1, 1000)).appendInt(Integer.MAX_VALUE).build(), + blockFactory.newIntBlockBuilder(2).appendInt(0).appendInt(Integer.MAX_VALUE).build() ), INT, DEFAULT_SORTABLE @@ -320,14 +318,14 @@ public void testCompareInts() { } public void testCompareLongs() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - LongBlock.newBlockBuilder(2).appendLong(Long.MIN_VALUE).appendLong(randomLongBetween(-1000, -1)).build(), - LongBlock.newBlockBuilder(2).appendLong(randomLongBetween(-1000, -1)).appendLong(0).build(), - LongBlock.newBlockBuilder(2).appendLong(0).appendLong(randomLongBetween(1, 1000)).build(), - LongBlock.newBlockBuilder(2).appendLong(randomLongBetween(1, 1000)).appendLong(Long.MAX_VALUE).build(), - LongBlock.newBlockBuilder(2).appendLong(0).appendLong(Long.MAX_VALUE).build() } + blockFactory.newLongBlockBuilder(2).appendLong(Long.MIN_VALUE).appendLong(randomLongBetween(-1000, -1)).build(), + blockFactory.newLongBlockBuilder(2).appendLong(randomLongBetween(-1000, -1)).appendLong(0).build(), + blockFactory.newLongBlockBuilder(2).appendLong(0).appendLong(randomLongBetween(1, 1000)).build(), + blockFactory.newLongBlockBuilder(2).appendLong(randomLongBetween(1, 1000)).appendLong(Long.MAX_VALUE).build(), + blockFactory.newLongBlockBuilder(2).appendLong(0).appendLong(Long.MAX_VALUE).build() ), LONG, DEFAULT_SORTABLE @@ -335,17 +333,17 @@ public void testCompareLongs() { } public void testCompareDoubles() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - DoubleBlock.newBlockBuilder(2) - .appendDouble(-Double.MAX_VALUE) - .appendDouble(randomDoubleBetween(-1000, -1, true)) - .build(), - DoubleBlock.newBlockBuilder(2).appendDouble(randomDoubleBetween(-1000, -1, true)).appendDouble(0.0).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(0).appendDouble(randomDoubleBetween(1, 1000, true)).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(randomLongBetween(1, 1000)).appendDouble(Double.MAX_VALUE).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(0.0).appendDouble(Double.MAX_VALUE).build() } + blockFactory.newDoubleBlockBuilder(2) + .appendDouble(-Double.MAX_VALUE) + .appendDouble(randomDoubleBetween(-1000, -1, true)) + .build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(randomDoubleBetween(-1000, -1, true)).appendDouble(0.0).build(), + 
blockFactory.newDoubleBlockBuilder(2).appendDouble(0).appendDouble(randomDoubleBetween(1, 1000, true)).build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(randomLongBetween(1, 1000)).appendDouble(Double.MAX_VALUE).build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(0.0).appendDouble(Double.MAX_VALUE).build() ), DOUBLE, DEFAULT_SORTABLE @@ -353,10 +351,10 @@ public void testCompareDoubles() { } public void testCompareUtf8() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("bye")).appendBytesRef(new BytesRef("hello")).build() } + blockFactory.newBytesRefBlockBuilder(2).appendBytesRef(new BytesRef("bye")).appendBytesRef(new BytesRef("hello")).build() ), BYTES_REF, UTF8 @@ -364,15 +362,16 @@ public void testCompareUtf8() { } public void testCompareBooleans() { + BlockFactory blockFactory = blockFactory(); testCompare( - new Page(new Block[] { BooleanBlock.newBlockBuilder(2).appendBoolean(false).appendBoolean(true).build() }), + new Page(blockFactory.newBooleanBlockBuilder(2).appendBoolean(false).appendBoolean(true).build()), BOOLEAN, DEFAULT_SORTABLE ); } private void testCompare(Page page, ElementType elementType, TopNEncoder encoder) { - Block nullBlock = Block.constantNullBlock(1); + Block nullBlock = TestBlockFactory.getNonBreakingInstance().newConstantNullBlock(1); Page nullPage = new Page(new Block[] { nullBlock, nullBlock, nullBlock, nullBlock, nullBlock }); for (int b = 0; b < page.getBlockCount(); b++) { @@ -423,6 +422,7 @@ private void testCompare(Page page, ElementType elementType, TopNEncoder encoder assertThat(TopNOperator.compareRows(r2, r1), greaterThan(0)); } } + page.releaseBlocks(); } private TopNOperator.Row row( diff --git a/x-pack/plugin/esql/qa/server/build.gradle b/x-pack/plugin/esql/qa/server/build.gradle index 12c3a9d951383..8ce88e233b726 100644 --- a/x-pack/plugin/esql/qa/server/build.gradle +++ b/x-pack/plugin/esql/qa/server/build.gradle @@ -19,7 +19,7 @@ subprojects { } - if (project.name != 'security' && project.name != 'mixed-cluster' ) { + if (project.name != 'security' && project.name != 'mixed-cluster' && project.name != 'multi-clusters') { // The security project just configures its subprojects apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle new file mode 100644 index 0000000000000..7008bd8b7aa01 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.internal.info.BuildParams +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.bwc-test' + +dependencies { + javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) + javaRestTestImplementation project(xpackModule('esql:qa:server')) +} + +def supportedVersion = bwcVersion -> { + // This test is less restricted than the actual CCS compatibility matrix that we are supporting. 
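Editor's note (the Gradle predicate continues right below): it gates which BWC versions get a test task. Individual Java tests can guard themselves the same way; a small sketch using `Clusters.oldVersion()` from this PR's `Clusters` helper (shown further down), with an illustrative assumption message:

```java
import org.elasticsearch.Version;

import static org.junit.Assume.assumeTrue;

class VersionGate {
    /** Skip a test when the remote (old) cluster predates cross-cluster ES|QL. */
    static void requireClusterSupportsCcq() {
        assumeTrue(
            "CCQ is available on 8.13 or later",
            Clusters.oldVersion().onOrAfter(Version.fromString("8.13.0"))
        );
    }
}
```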
+ // CCQ is available on 8.13 or later + return bwcVersion.onOrAfter(Version.fromString("8.13.0")); +} + +BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> + tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { + usesBwcDistribution(bwcVersion) + systemProperty("tests.old_cluster_version", bwcVersion) + maxParallelForks = 1 + } +} diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java new file mode 100644 index 0000000000000..20abfa2fe18fc --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.ccq; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; + +public class Clusters { + public static ElasticsearchCluster remoteCluster() { + return ElasticsearchCluster.local() + .name("remote_cluster") + .distribution(DistributionType.DEFAULT) + .version(Version.fromString(System.getProperty("tests.old_cluster_version"))) + .nodes(2) + .setting("node.roles", "[data,ingest,master]") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .shared(true) + .build(); + } + + public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteCluster) { + return ElasticsearchCluster.local() + .name("local_cluster") + .distribution(DistributionType.DEFAULT) + .version(Version.CURRENT) + .nodes(2) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("node.roles", "[data,ingest,master,remote_cluster_client]") + .setting("cluster.remote.remote_cluster.seeds", () -> "\"" + remoteCluster.getTransportEndpoint(0) + "\"") + .setting("cluster.remote.connections_per_cluster", "1") + .shared(true) + .build(); + } + + public static org.elasticsearch.Version oldVersion() { + return org.elasticsearch.Version.fromString(System.getProperty("tests.old_cluster_version")); + } +} diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java new file mode 100644 index 0000000000000..75ec955ca0b2d --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + + package org.elasticsearch.xpack.esql.ccq; + + import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + + import org.apache.http.HttpHost; + import org.elasticsearch.client.Request; + import org.elasticsearch.client.RestClient; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.core.IOUtils; + import org.elasticsearch.test.TestClustersThreadFilter; + import org.elasticsearch.test.cluster.ElasticsearchCluster; + import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; + import org.elasticsearch.xpack.ql.CsvSpecReader; + import org.junit.ClassRule; + import org.junit.rules.RuleChain; + import org.junit.rules.TestRule; + + import java.io.IOException; + import java.util.Arrays; + import java.util.Locale; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + import java.util.stream.Collectors; + + import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; + import static org.mockito.ArgumentMatchers.any; + import static org.mockito.Mockito.doAnswer; + import static org.mockito.Mockito.mock; + import static org.mockito.Mockito.when; + + /** + * This suite loads the data into either the local cluster or the remote cluster, then runs spec tests with CCQ. + * TODO: Some spec tests prevent us from splitting data across multiple shards/indices/clusters + */ +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class MultiClusterSpecIT extends EsqlSpecTestCase { + + static ElasticsearchCluster remoteCluster = Clusters.remoteCluster(); + static ElasticsearchCluster localCluster = Clusters.localCluster(remoteCluster); + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); + + public MultiClusterSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvSpecReader.CsvTestCase testCase) { + super(fileName, groupName, testName, lineNumber, convertToRemoteIndices(testCase)); + } + + @Override + protected void shouldSkipTest(String testName) { + super.shouldSkipTest(testName); + assumeFalse("CCQ doesn't support enrich yet", hasEnrich(testCase.query)); + assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); + assumeTrue("Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, Clusters.oldVersion())); + } + + @Override + protected String getTestRestCluster() { + return localCluster.getHttpAddresses(); + } + + @Override + protected RestClient buildClient(Settings settings, HttpHost[] localHosts) throws IOException { + RestClient localClient = super.buildClient(settings, localHosts); + HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); + RestClient remoteClient = super.buildClient(settings, remoteHosts); + return twoClients(localClient, remoteClient); + } + + /** + * Creates a new mock client that dispatches every request to both the local and remote clusters, excluding _bulk and _query requests. + * - '_bulk' requests are randomly sent to either the local or remote cluster to populate data. Some spec tests, such as AVG, + * prevent the splitting of bulk requests. + * - '_query' requests are dispatched to the local cluster only, as we are testing cross-cluster queries.
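+ * - all other requests are duplicated: they are sent to both the local and remote clusters, so each side ends up with the same set of spec indices.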
+ */ + static RestClient twoClients(RestClient localClient, RestClient remoteClient) throws IOException { + RestClient twoClients = mock(RestClient.class); + when(twoClients.performRequest(any())).then(invocation -> { + Request request = invocation.getArgument(0); + if (request.getEndpoint().contains("_query")) { + return localClient.performRequest(request); + } else if (request.getEndpoint().contains("_bulk")) { + if (randomBoolean()) { + return remoteClient.performRequest(request); + } else { + return localClient.performRequest(request); + } + } else { + localClient.performRequest(request); + return remoteClient.performRequest(request); + } + }); + doAnswer(invocation -> { + IOUtils.close(localClient, remoteClient); + return null; + }).when(twoClients).close(); + return twoClients; + } + + static CsvSpecReader.CsvTestCase convertToRemoteIndices(CsvSpecReader.CsvTestCase testCase) { + String query = testCase.query; + String[] commands = query.split("\\|"); + String first = commands[0].trim(); + if (commands[0].toLowerCase(Locale.ROOT).startsWith("from")) { + String[] parts = commands[0].split("\\["); + assert parts.length >= 1 : parts; + String fromStatement = parts[0]; + String[] localIndices = fromStatement.substring("FROM ".length()).split(","); + String remoteIndices = Arrays.stream(localIndices) + .map(index -> "*:" + index.trim() + "," + index.trim()) + .collect(Collectors.joining(",")); + var newFrom = "FROM " + remoteIndices + commands[0].substring(fromStatement.length()); + testCase.query = newFrom + " " + query.substring(first.length()); + } + int offset = testCase.query.length() - query.length(); + if (offset != 0) { + final String pattern = "Line (\\d+):(\\d+):"; + final Pattern regex = Pattern.compile(pattern); + testCase.adjustExpectedWarnings(warning -> { + Matcher matcher = regex.matcher(warning); + if (matcher.find()) { + int line = Integer.parseInt(matcher.group(1)); + if (line == 1) { + int position = Integer.parseInt(matcher.group(2)); + int newPosition = position + offset; + return warning.replaceFirst(pattern, "Line " + line + ":" + newPosition + ":"); + } + } + return warning; + }); + } + return testCase; + } + + static boolean hasEnrich(String query) { + String[] commands = query.split("\\|"); + for (int i = 0; i < commands.length; i++) { + commands[i] = commands[i].trim(); + if (commands[i].toLowerCase(Locale.ROOT).startsWith("enrich")) { + return true; + } + } + return false; + } + + static boolean hasIndexMetadata(String query) { + String[] commands = query.split("\\|"); + if (commands[0].trim().toLowerCase(Locale.ROOT).startsWith("from")) { + String[] parts = commands[0].split("\\["); + return parts.length > 1 && parts[1].contains("_index"); + } + return false; + } +} diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java new file mode 100644 index 0000000000000..2f0b11b7a3009 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + + package org.elasticsearch.xpack.esql.ccq; + + import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + + import org.apache.http.HttpHost; + import org.elasticsearch.client.Request; + import org.elasticsearch.client.RestClient; + import org.elasticsearch.common.Strings; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.test.TestClustersThreadFilter; + import org.elasticsearch.test.cluster.ElasticsearchCluster; + import org.elasticsearch.test.rest.ESRestTestCase; + import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; + import org.junit.After; + import org.junit.Before; + import org.junit.ClassRule; + import org.junit.rules.RuleChain; + import org.junit.rules.TestRule; + + import java.io.IOException; + import java.util.List; + import java.util.Map; + import java.util.stream.Collectors; + import java.util.stream.IntStream; + import java.util.stream.Stream; + + import static org.elasticsearch.test.MapMatcher.assertMap; + import static org.elasticsearch.test.MapMatcher.matchesMap; + import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.runEsql; + +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class MultiClustersIT extends ESRestTestCase { + static ElasticsearchCluster remoteCluster = Clusters.remoteCluster(); + static ElasticsearchCluster localCluster = Clusters.localCluster(remoteCluster); + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); + + @Override + protected String getTestRestCluster() { + return localCluster.getHttpAddresses(); + } + + record Doc(int id, String color, long data) { + + } + + final String localIndex = "test-local-index"; + List<Doc> localDocs = List.of(); + final String remoteIndex = "test-remote-index"; + List<Doc> remoteDocs = List.of(); + + @Before + public void setUpIndices() throws Exception { + final String mapping = """ + "properties": { + "data": { "type": "long" }, + "color": { "type": "keyword" } + } + """; + RestClient localClient = client(); + localDocs = IntStream.range(0, between(1, 500)) + .mapToObj(n -> new Doc(n, randomFrom("red", "yellow", "green"), randomIntBetween(1, 1000))) + .toList(); + createIndex( + localClient, + localIndex, + Settings.builder().put("index.number_of_shards", randomIntBetween(1, 5)).build(), + mapping, + null + ); + indexDocs(localClient, localIndex, localDocs); + + remoteDocs = IntStream.range(0, between(1, 500)) + .mapToObj(n -> new Doc(n, randomFrom("red", "yellow", "green"), randomIntBetween(1, 1000))) + .toList(); + try (RestClient remoteClient = remoteClusterClient()) { + createIndex( + remoteClient, + remoteIndex, + Settings.builder().put("index.number_of_shards", randomIntBetween(1, 5)).build(), + mapping, + null + ); + indexDocs(remoteClient, remoteIndex, remoteDocs); + } + } + + @After + public void wipeIndices() throws Exception { + try (RestClient remoteClient = remoteClusterClient()) { + deleteIndex(remoteClient, remoteIndex); + } + } + + void indexDocs(RestClient client, String index, List<Doc> docs) throws IOException { + logger.info("--> indexing {} docs to index {}", docs.size(), index); + long total = 0; + for (Doc doc : docs) { + Request createDoc = new Request("POST", "/" + index + "/_doc/id_" + doc.id); + if (randomInt(100) < 10) { + createDoc.addParameter("refresh", "true"); + } + createDoc.setJsonEntity(Strings.format(""" + { "color": "%s", "data": %s} + """, doc.color, doc.data)); + assertOK(client.performRequest(createDoc)); + total += doc.data; + } + logger.info("--> index={} total={}", index, total); + refresh(client, index); + } + + private Map<String, Object> run(String query) throws IOException { + Map<String, Object> resp = runEsql(new RestEsqlTestCase.RequestObjectBuilder().query(query).build()); + logger.info("--> query {} response {}", query, resp); + return resp; + } + + public void testCount() throws Exception { + { + Map<String, Object> result = run("FROM test-local-index,*:test-remote-index | STATS c = COUNT(*)"); + var columns = List.of(Map.of("name", "c", "type", "long")); + var values = List.of(List.of(localDocs.size() + remoteDocs.size())); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + { + Map<String, Object> result = run("FROM *:test-remote-index | STATS c = COUNT(*)"); + var columns = List.of(Map.of("name", "c", "type", "long")); + var values = List.of(List.of(remoteDocs.size())); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + } + + public void testUngroupedAggs() throws Exception { + { + Map<String, Object> result = run("FROM test-local-index,*:test-remote-index | STATS total = SUM(data)"); + var columns = List.of(Map.of("name", "total", "type", "long")); + long sum = Stream.concat(localDocs.stream(), remoteDocs.stream()).mapToLong(d -> d.data).sum(); + var values = List.of(List.of(Math.toIntExact(sum))); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + { + Map<String, Object> result = run("FROM *:test-remote-index | STATS total = SUM(data)"); + var columns = List.of(Map.of("name", "total", "type", "long")); + long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); + var values = List.of(List.of(Math.toIntExact(sum))); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + } + + public void testGroupedAggs() throws Exception { + { + Map<String, Object> result = run("FROM test-local-index,*:test-remote-index | STATS total = SUM(data) BY color | SORT color"); + var columns = List.of(Map.of("name", "total", "type", "long"), Map.of("name", "color", "type", "keyword")); + var values = Stream.concat(localDocs.stream(), remoteDocs.stream()) + .collect(Collectors.toMap(d -> d.color, Doc::data, Long::sum)) + .entrySet() + .stream() + .sorted(Map.Entry.comparingByKey()) + .map(e -> List.of(Math.toIntExact(e.getValue()), e.getKey())) + .toList(); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + { + Map<String, Object> result = run("FROM *:test-remote-index | STATS total = SUM(data) by color | SORT color"); + var columns = List.of(Map.of("name", "total", "type", "long"), Map.of("name", "color", "type", "keyword")); + var values = remoteDocs.stream() + .collect(Collectors.toMap(d -> d.color, Doc::data, Long::sum)) + .entrySet() + .stream() + .sorted(Map.Entry.comparingByKey()) + .map(e -> List.of(Math.toIntExact(e.getValue()), e.getKey())) + .toList(); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + } + + private RestClient remoteClusterClient() throws IOException { + var clusterHosts = parseClusterHosts(remoteCluster.getHttpAddresses()); + return buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); + } +} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 734f26fab547a..2d6280e4ceb9b 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++
b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -50,7 +50,7 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { private final String groupName; private final String testName; private final Integer lineNumber; - private final CsvTestCase testCase; + protected final CsvTestCase testCase; @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 5078fc1622637..2dd8b4ad68a1f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -11,7 +11,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; @@ -139,6 +141,7 @@ public void close() { CsvColumn[] columns = null; + var blockFactory = BlockFactory.getInstance(new NoopCircuitBreaker("test-noop"), BigArrays.NON_RECYCLING_INSTANCE); try (BufferedReader reader = org.elasticsearch.xpack.ql.TestUtils.reader(source)) { String line; int lineNumber = 1; @@ -178,7 +181,7 @@ public void close() { columns[i] = new CsvColumn( name, type, - BlockUtils.wrapperFor(BlockFactory.getNonBreakingInstance(), ElementType.fromJava(type.clazz()), 8) + BlockUtils.wrapperFor(blockFactory, ElementType.fromJava(type.clazz()), 8) ); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index b45b30b6f2cf3..0590caf2019b4 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -9,6 +9,7 @@ import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -17,6 +18,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -53,11 +55,25 @@ public void ensureBlocksReleased() { CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.REQUEST); try { assertBusy(() -> { - logger.info("running tasks: {}", client().admin().cluster().prepareListTasks().get()); + logger.info( + "running tasks: {}", + client().admin() + 
.cluster() + .prepareListTasks() + .get() + .getTasks() + .stream() + .filter( + // Skip the tasks that'd get in the way while debugging + t -> false == t.action().contains(TransportListTasksAction.TYPE.name()) + && false == t.action().contains(HealthNode.TASK_NAME) + ) + .toList() + ); assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); }); } catch (Exception e) { - assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); + throw new RuntimeException("failed waiting for breakers to clear", e); } } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java new file mode 100644 index 0000000000000..7a5072120e5af --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.engine.SegmentsStats; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +/** A pausable test case. Subclasses extend this test case to simulate slow-running queries. + * + * Uses the evaluation of a runtime field "pause_me" of type long in the mappings, along + * with a custom script language "pause" and the semaphore "scriptPermits", to block execution.
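+ * + * Each script execution acquires one permit from "scriptPermits" (waiting up to a minute) before emitting a value, so a test controls how far a query progresses by releasing exactly as many permits as the number of values it wants emitted.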
+ */ +public abstract class AbstractPausableIntegTestCase extends AbstractEsqlIntegTestCase { + + private static final Logger LOGGER = LogManager.getLogger(AbstractPausableIntegTestCase.class); + + protected static final Semaphore scriptPermits = new Semaphore(0); + + protected int pageSize = -1; + + protected int numberOfDocs = -1; + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), PausableFieldPlugin.class); + } + + protected int pageSize() { + if (pageSize == -1) { + pageSize = between(10, 100); + } + return pageSize; + } + + protected int numberOfDocs() { + if (numberOfDocs == -1) { + numberOfDocs = between(4 * pageSize(), 5 * pageSize()); + } + return numberOfDocs; + } + + @Before + public void setupIndex() throws IOException { + assumeTrue("requires query pragmas", canUseQueryPragmas()); + + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("pause_me"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", "pause").endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + client().admin() + .indices() + .prepareCreate("test") + .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0)) + .setMapping(mapping.endObject()) + .get(); + + BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int i = 0; i < numberOfDocs(); i++) { + bulk.add(prepareIndex("test").setId(Integer.toString(i)).setSource("foo", i)); + } + bulk.get(); + /* + * forceMerge so we can be sure that we don't bump into tiny + * segments that finish super quickly and cause us to report strange + * statuses when we expect "starting". + */ + client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get(); + /* + * Double super extra paranoid check that force merge worked. It's + * failed to reduce the index to a single segment and caused this test + * to fail in very difficult to debug ways. If it fails again, it'll + * trip here. Or maybe it won't! And we'll learn something. Maybe + * it's ghosts. 
+ */ + SegmentsStats stats = client().admin().indices().prepareStats("test").get().getPrimaries().getSegments(); + if (stats.getCount() != 1L) { + fail(Strings.toString(stats)); + } + } + + public static class PausableFieldPlugin extends Plugin implements ScriptPlugin { + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "pause"; + } + + @Override + @SuppressWarnings("unchecked") + public FactoryType compile( + String name, + String code, + ScriptContext context, + Map params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + try { + assertTrue(scriptPermits.tryAcquire(1, TimeUnit.MINUTES)); + } catch (Exception e) { + throw new AssertionError(e); + } + LOGGER.debug("--> emitting value"); + emit(1); + } + }; + } + }; + } + + @Override + public Set> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java new file mode 100644 index 0000000000000..b58a0cd66b904 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java @@ -0,0 +1,257 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + + package org.elasticsearch.xpack.esql.action; + + import org.elasticsearch.ResourceNotFoundException; + import org.elasticsearch.action.support.master.AcknowledgedResponse; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.compute.operator.exchange.ExchangeService; + import org.elasticsearch.core.TimeValue; + import org.elasticsearch.plugins.Plugin; + import org.elasticsearch.tasks.TaskInfo; + import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; + import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction; + import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; + import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; + import org.elasticsearch.xpack.esql.plugin.QueryPragmas; + import org.hamcrest.core.IsEqual; + + import java.nio.file.Path; + import java.util.ArrayList; + import java.util.Collection; + import java.util.Collections; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static org.elasticsearch.core.TimeValue.timeValueMillis; + import static org.elasticsearch.core.TimeValue.timeValueMinutes; + import static org.elasticsearch.core.TimeValue.timeValueSeconds; + import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; + import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; + import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; + import static org.hamcrest.Matchers.equalTo; + import static org.hamcrest.Matchers.is; + import static org.hamcrest.Matchers.notNullValue; + + /** + * Individual tests for specific aspects of the async query API. + */ +public class AsyncEsqlQueryActionIT extends AbstractPausableIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + ArrayList<Class<? extends Plugin>> actions = new ArrayList<>(super.nodePlugins()); + actions.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); + actions.add(InternalExchangePlugin.class); + return Collections.unmodifiableList(actions); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + .build(); + } + + public void testBasicAsyncExecution() throws Exception { + try (var initialResponse = sendAsyncQuery()) { + assertThat(initialResponse.asyncExecutionId(), isPresent()); + assertThat(initialResponse.isRunning(), is(true)); + String id = initialResponse.asyncExecutionId().get(); + + if (randomBoolean()) { + // let's time out first + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setWaitForCompletionTimeout(timeValueMillis(10)); + getResultsRequest.setKeepAlive(randomKeepAlive()); + var future = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + try (var responseWithTimeout = future.get()) { + assertThat(initialResponse.asyncExecutionId(), isPresent()); + assertThat(responseWithTimeout.asyncExecutionId().get(), equalTo(id)); + assertThat(responseWithTimeout.isRunning(), is(true)); + } + } + + // Now we wait + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setWaitForCompletionTimeout(timeValueSeconds(60)); + getResultsRequest.setKeepAlive(randomKeepAlive()); + var future = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + + // release the permits to allow the query to proceed + scriptPermits.release(numberOfDocs()); + + try (var finalResponse = future.get()) { + assertThat(finalResponse, notNullValue()); +
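// with all permits released above, the query can run to completion, so this response is the final one +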
assertThat(finalResponse.isRunning(), is(false)); + assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(finalResponse).size(), equalTo(1)); + } + + // Get the stored result (again) + var again = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + try (var finalResponse = again.get()) { + assertThat(finalResponse, notNullValue()); + assertThat(finalResponse.isRunning(), is(false)); + assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(finalResponse).size(), equalTo(1)); + } + + AcknowledgedResponse deleteResponse = deleteAsyncId(id); + assertThat(deleteResponse.isAcknowledged(), equalTo(true)); + // the stored response should no longer be retrievable + var e = expectThrows(ResourceNotFoundException.class, () -> deleteAsyncId(id)); + assertThat(e.getMessage(), IsEqual.equalTo(id)); + } finally { + scriptPermits.drainPermits(); + } + } + + public void testAsyncCancellation() throws Exception { + try (var initialResponse = sendAsyncQuery()) { + assertThat(initialResponse.asyncExecutionId(), isPresent()); + assertThat(initialResponse.isRunning(), is(true)); + String id = initialResponse.asyncExecutionId().get(); + + DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id); + var future = client().execute(DeleteAsyncResultAction.INSTANCE, request); + + // there should be just one task + List tasks = getEsqlQueryTasks(); + assertThat(tasks.size(), is(1)); + + // release the permits to allow the query to proceed + scriptPermits.release(numberOfDocs()); + + var deleteResponse = future.actionGet(timeValueSeconds(60)); + assertThat(deleteResponse.isAcknowledged(), equalTo(true)); + + // there should be no tasks after delete + tasks = getEsqlQueryTasks(); + assertThat(tasks.size(), is(0)); + + // the stored response should no longer be retrievable + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setKeepAlive(timeValueMinutes(10)); + getResultsRequest.setWaitForCompletionTimeout(timeValueSeconds(60)); + var e = expectThrows( + ResourceNotFoundException.class, + () -> client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet() + ); + assertThat(e.getMessage(), equalTo(id)); + } finally { + scriptPermits.drainPermits(); + } + } + + public void testFinishingBeforeTimeoutKeep() { + testFinishingBeforeTimeout(true); + } + + public void testFinishingBeforeTimeoutDoNotKeep() { + testFinishingBeforeTimeout(false); + } + + private void testFinishingBeforeTimeout(boolean keepOnCompletion) { + // don't block the query execution at all + scriptPermits.drainPermits(); + assert scriptPermits.availablePermits() == 0; + + scriptPermits.release(numberOfDocs()); + + var request = new EsqlQueryRequestBuilder(client()).query("from test | stats sum(pause_me)") + .pragmas(queryPragmas()) + .async(true) + .waitForCompletionTimeout(TimeValue.timeValueSeconds(60)) + .keepOnCompletion(keepOnCompletion) + .keepAlive(randomKeepAlive()); + + try (var response = request.execute().actionGet(60, TimeUnit.SECONDS)) { + assertThat(response.isRunning(), is(false)); + assertThat(response.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(response).size(), equalTo(1)); + + if (keepOnCompletion) { + assertThat(response.asyncExecutionId(), isPresent()); + // we should be able to retrieve the response by id, since it has been kept + String id = 
response.asyncExecutionId().get(); + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setWaitForCompletionTimeout(timeValueSeconds(60)); + var future = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + try (var resp = future.actionGet(60, TimeUnit.SECONDS)) { + assertThat(resp.asyncExecutionId().get(), equalTo(id)); + assertThat(resp.isRunning(), is(false)); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(resp).size(), equalTo(1)); + } + } else { + assertThat(response.asyncExecutionId(), isEmpty()); + } + } finally { + scriptPermits.drainPermits(); + } + } + + private List getEsqlQueryTasks() throws Exception { + List foundTasks = new ArrayList<>(); + assertBusy(() -> { + List tasks = client().admin() + .cluster() + .prepareListTasks() + .setActions(EsqlQueryAction.NAME + "[a]") + .setDetailed(true) + .get() + .getTasks(); + foundTasks.addAll(tasks); + }); + return foundTasks; + } + + private EsqlQueryResponse sendAsyncQuery() { + scriptPermits.drainPermits(); + assert scriptPermits.availablePermits() == 0; + + scriptPermits.release(between(1, 5)); + var pragmas = queryPragmas(); + return new EsqlQueryRequestBuilder(client()).query("from test | stats sum(pause_me)") + .pragmas(pragmas) + .async(true) + // deliberately small timeout, to frequently trigger incomplete response + .waitForCompletionTimeout(TimeValue.timeValueNanos(1)) + .keepOnCompletion(randomBoolean()) + .keepAlive(randomKeepAlive()) + .execute() + .actionGet(60, TimeUnit.SECONDS); + } + + private QueryPragmas queryPragmas() { + return new QueryPragmas( + Settings.builder() + // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. + .put("data_partitioning", "shard") + // Limit the page size to something small so we do more than one page worth of work, so we get more status updates. + .put("page_size", pageSize()) + .build() + ); + } + + private AcknowledgedResponse deleteAsyncId(String id) { + DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id); + return client().execute(DeleteAsyncResultAction.INSTANCE, request).actionGet(timeValueSeconds(60)); + } + + TimeValue randomKeepAlive() { + return TimeValue.parseTimeValue(randomTimeValue(1, 5, "d"), "test"); + } + + public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin { + public LocalStateEsqlAsync(final Settings settings, final Path configPath) { + super(settings, configPath); + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java new file mode 100644 index 0000000000000..8d7cbc5cd41be --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.compute.operator.DriverTaskRunner; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase.randomPragmas; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; + +public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { + private static final String REMOTE_CLUSTER = "cluster-a"; + + @Override + protected Collection remoteClusterAlias() { + return List.of(REMOTE_CLUSTER); + } + + @Override + protected Collection> nodePlugins(String clusterAlias) { + List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); + plugins.add(EsqlPlugin.class); + plugins.add(InternalExchangePlugin.class); + plugins.add(PauseFieldPlugin.class); + return plugins; + } + + public static class InternalExchangePlugin extends Plugin { + @Override + public List> getSettings() { + return List.of( + Setting.timeSetting( + ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, + TimeValue.timeValueMillis(between(1000, 3000)), + Setting.Property.NodeScope + ) + ); + } + } + + @Before + public void resetPlugin() { + PauseFieldPlugin.allowEmitting = new CountDownLatch(1); + PauseFieldPlugin.startEmitting = new CountDownLatch(1); + } + + public static class PauseFieldPlugin extends Plugin implements ScriptPlugin { + public static CountDownLatch startEmitting = new CountDownLatch(1); + public static CountDownLatch allowEmitting = new CountDownLatch(1); + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "pause"; + } + + @Override + @SuppressWarnings("unchecked") + public FactoryType compile( + String name, + String code, + ScriptContext context, + Map params + ) { + if (context == LongFieldScript.CONTEXT) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map 
params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + startEmitting.countDown(); + try { + assertTrue(allowEmitting.await(30, TimeUnit.SECONDS)); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + emit(1); + } + }; + } + }; + } + throw new IllegalStateException("unsupported type " + context); + } + + @Override + public Set> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } + + private void createRemoteIndex(int numDocs) throws Exception { + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("const"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", "pause").endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + mapping.endObject(); + client(REMOTE_CLUSTER).admin().indices().prepareCreate("test").setMapping(mapping).get(); + BulkRequestBuilder bulk = client(REMOTE_CLUSTER).prepareBulk("test").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int i = 0; i < numDocs; i++) { + bulk.add(new IndexRequest().source("foo", i)); + } + bulk.get(); + } + + public void testCancel() throws Exception { + createRemoteIndex(between(10, 100)); + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM *:test | STATS total=sum(const) | LIMIT 1"); + request.pragmas(randomPragmas()); + PlainActionFuture requestFuture = new PlainActionFuture<>(); + client().execute(EsqlQueryAction.INSTANCE, request, requestFuture); + assertTrue(PauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); + List rootTasks = new ArrayList<>(); + assertBusy(() -> { + List tasks = client().admin().cluster().prepareListTasks().setActions(EsqlQueryAction.NAME).get().getTasks(); + assertThat(tasks, hasSize(1)); + rootTasks.addAll(tasks); + }); + var cancelRequest = new CancelTasksRequest().setTargetTaskId(rootTasks.get(0).taskId()).setReason("proxy timeout"); + client().execute(CancelTasksAction.INSTANCE, cancelRequest); + assertBusy(() -> { + List drivers = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setActions(DriverTaskRunner.ACTION_NAME) + .get() + .getTasks(); + assertThat(drivers.size(), greaterThanOrEqualTo(1)); + for (TaskInfo driver : drivers) { + assertTrue(driver.cancellable()); + } + }); + PauseFieldPlugin.allowEmitting.countDown(); + Exception error = expectThrows(Exception.class, requestFuture::actionGet); + assertThat(error.getMessage(), containsString("proxy timeout")); + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java new file mode 100644 index 0000000000000..e3a01bd6f4dd9 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.EnrichPlugin; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; +import org.junit.After; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; + +public class CrossClustersEnrichIT extends AbstractMultiClustersTestCase { + private static final String REMOTE_CLUSTER = "cluster_a"; + + @Override + protected Collection remoteClusterAlias() { + return List.of(REMOTE_CLUSTER); + } + + @Override + protected Collection> nodePlugins(String clusterAlias) { + List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); + plugins.add(EsqlPlugin.class); + plugins.add(InternalExchangePlugin.class); + plugins.add(LocalStateEnrich.class); + plugins.add(IngestCommonPlugin.class); + plugins.add(ReindexPlugin.class); + return plugins; + } + + @Override + protected Settings nodeSettings() { + return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + public static class InternalExchangePlugin extends Plugin { + @Override + public List> getSettings() { + return List.of( + Setting.timeSetting( + ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, + TimeValue.timeValueSeconds(30), + Setting.Property.NodeScope + ) + ); + } + } + + public void testUnsupportedEnrich() { + Client localClient = client(LOCAL_CLUSTER); + localClient.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); + record Host(String ip, String os) { + + } + var hosts = List.of(new Host("192.168.1.3", "Windows")); + for (var h : hosts) { + localClient.prepareIndex("hosts").setSource("ip", h.ip, "os", h.os).get(); + } + 
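// refresh so the enrich policy execution below sees the freshly indexed host docs +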
localClient.admin().indices().prepareRefresh("hosts").get(); + EnrichPolicy policy = new EnrichPolicy("match", null, List.of("hosts"), "ip", List.of("ip", "os")); + localClient.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request("hosts", policy)).actionGet(); + localClient.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request("hosts")).actionGet(); + assertAcked(client(LOCAL_CLUSTER).admin().indices().prepareDelete("hosts")); + + record Event(String ip, String message) { + + } + for (String cluster : List.of(LOCAL_CLUSTER, REMOTE_CLUSTER)) { + var events = List.of(new Event("192.168.1.4", "access denied"), new Event("192.168.1.3", "restart")); + assertAcked(client(cluster).admin().indices().prepareCreate("events").setMapping("ip", "type=ip", "message", "type=text")); + for (Event e : events) { + client(cluster).prepareIndex("events").setSource("ip", e.ip, "message", e.message).get(); + } + client(cluster).admin().indices().prepareRefresh("events").get(); + } + List<String> queries = List.of( + "FROM *:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | LIMIT 1", + "FROM events*,*:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | LIMIT 1", + "FROM *:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | STATS COUNT(*) BY ip | LIMIT 1", + "FROM events*,*:events | EVAL ip_str = TO_STR(ip) | ENRICH hosts on ip_str | STATS COUNT(*) BY ip | LIMIT 1" + ); + for (String q : queries) { + Exception error = expectThrows(IllegalArgumentException.class, () -> runQuery(q).close()); + assertThat(error.getMessage(), containsString("cross clusters query doesn't support enrich yet")); + } + } + + @After + public void cleanClusters() { + cluster(LOCAL_CLUSTER).wipe(Set.of()); + client(LOCAL_CLUSTER).execute(DeleteEnrichPolicyAction.INSTANCE, new DeleteEnrichPolicyAction.Request("hosts")); + cluster(REMOTE_CLUSTER).wipe(Set.of()); + } + + protected EsqlQueryResponse runQuery(String query) { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query(query); + request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); + return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); + } + + public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin { + + public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + + plugins.add(new EnrichPlugin(settings) { + @Override + protected XPackLicenseState getLicenseState() { + // delegate to the outer LocalStateCompositeXPackPlugin instead of recursing into this override + return LocalStateEnrich.this.getLicenseState(); + } + }); + } + + public static class EnrichTransportXPackInfoAction extends TransportXPackInfoAction { + @Inject + public EnrichTransportXPackInfoAction( + TransportService transportService, + ActionFilters actionFilters, + LicenseService licenseService, + NodeClient client + ) { + super(transportService, actionFilters, licenseService, client); + } + + @Override + protected List<XPackInfoFeatureAction> infoActions() { + return Collections.singletonList(XPackInfoFeatureAction.ENRICH); + } + } + + @Override + protected Class<? extends TransportXPackInfoAction> getInfoAction() { + return EnrichTransportXPackInfoAction.class; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index a24b643a299c2..83de2856d036c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -7,24 +7,32 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; +import org.junit.Before; import java.util.ArrayList; import java.util.Collection; -import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { @@ -37,11 +45,10 @@ protected Collection remoteClusterAlias() { @Override protected Collection> nodePlugins(String clusterAlias) { - List> plugins = new ArrayList<>(); - plugins.addAll(super.nodePlugins(clusterAlias)); + List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); plugins.add(EsqlPlugin.class); plugins.add(InternalExchangePlugin.class); - return CollectionUtils.appendToCopy(super.nodePlugins(clusterAlias), EsqlPlugin.class); + return plugins; } public static class InternalExchangePlugin extends Plugin { @@ -57,61 +64,140 @@ public List> getSettings() { } } - public void testUnsupported() { - int numDocs = between(1, 10); - for (String cluster : List.of(LOCAL_CLUSTER, REMOTE_CLUSTER)) { - Client client = client(cluster); - assertAcked( - client.admin() - .indices() - .prepareCreate("events") - .setSettings(Settings.builder().put("index.number_of_shards", randomIntBetween(1, 5))) - .setMapping("tag", "type=keyword", "v", "type=long") - ); - for (int i = 0; i < numDocs; i++) { - client.prepareIndex("events").setSource("tag", cluster, "v", i).get(); - } - client.admin().indices().prepareRefresh("events").get(); + @Before + public void populateLocalIndices() { + Client localClient = client(LOCAL_CLUSTER); + assertAcked( + localClient.admin() + .indices() + .prepareCreate("logs-1") + .setSettings(Settings.builder().put("index.number_of_shards", randomIntBetween(1, 5))) + .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long") + ); + for (int i = 0; i < 10; i++) { + localClient.prepareIndex("logs-1").setSource("id", "local-" + i, "tag", "local", "v", i).get(); } - var emptyQueries = List.of( - "from *:* | LIMIT 0", - "from *,*:* | LIMIT 0", - "from *:events* | LIMIT 0", - "from events,*:events* | LIMIT 0" + localClient.admin().indices().prepareRefresh("logs-1").get(); + } + + @Before + public void populateRemoteIndices() { + Client remoteClient = client(REMOTE_CLUSTER); + assertAcked( + remoteClient.admin() + .indices() + .prepareCreate("logs-2") + 
.setSettings(Settings.builder().put("index.number_of_shards", randomIntBetween(1, 5))) + .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long") ); - for (String q : emptyQueries) { - try (EsqlQueryResponse resp = runQuery(q)) { - assertThat(resp.columns(), hasSize(2)); - assertFalse(resp.values().hasNext()); + for (int i = 0; i < 10; i++) { + remoteClient.prepareIndex("logs-2").setSource("id", "remote-" + i, "tag", "remote", "v", i * i).get(); + } + remoteClient.admin().indices().prepareRefresh("logs-2").get(); + } + + public void testSimple() { + try (EsqlQueryResponse resp = runQuery("from logs-*,*:logs-* | stats sum (v)")) { + List> values = getValuesList(resp); + assertThat(values, hasSize(1)); + assertThat(values.get(0), equalTo(List.of(330L))); + } + try (EsqlQueryResponse resp = runQuery("from logs-*,*:logs-* | stats count(*) by tag | sort tag | keep tag")) { + List> values = getValuesList(resp); + assertThat(values, hasSize(2)); + assertThat(values.get(0), equalTo(List.of("local"))); + assertThat(values.get(1), equalTo(List.of("remote"))); + } + } + + public void testMetadataIndex() { + try (EsqlQueryResponse resp = runQuery("FROM logs*,*:logs* [METADATA _index] | stats sum(v) by _index | sort _index")) { + List> values = getValuesList(resp); + assertThat(values.get(0), equalTo(List.of(285L, "cluster-a:logs-2"))); + assertThat(values.get(1), equalTo(List.of(45L, "logs-1"))); + } + } + + public void testProfile() { + final int localOnlyProfiles; + { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM logs* | stats sum(v)"); + request.profile(true); + try (EsqlQueryResponse resp = runQuery(request)) { + List> values = getValuesList(resp); + assertThat(values.get(0), equalTo(List.of(45L))); + assertNotNull(resp.profile()); + List drivers = resp.profile().drivers(); + assertThat(drivers.size(), greaterThanOrEqualTo(2)); // one coordinator and at least one data + localOnlyProfiles = drivers.size(); } } - var remotePatterns = List.of("*:*", "*, *:*", "*:events*", "events, *:events*"); - for (String pattern : remotePatterns) { - var query = "FROM " + pattern + " | LIMIT " + between(1, 100); - IllegalArgumentException error = expectThrows(IllegalArgumentException.class, () -> runQuery(query).close()); - assertThat(error.getMessage(), equalTo("ES|QL does not yet support querying remote indices [" + pattern + "]")); + final int remoteOnlyProfiles; + { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM *:logs* | stats sum(v)"); + request.profile(true); + try (EsqlQueryResponse resp = runQuery(request)) { + List> values = getValuesList(resp); + assertThat(values.get(0), equalTo(List.of(285L))); + assertNotNull(resp.profile()); + List drivers = resp.profile().drivers(); + assertThat(drivers.size(), greaterThanOrEqualTo(3)); // two coordinators and at least one data + remoteOnlyProfiles = drivers.size(); + } } - int limit = between(1, numDocs); - var localQueries = List.of("from events* | LIMIT " + limit, "from * | LIMIT " + limit); - for (String q : localQueries) { - try (EsqlQueryResponse resp = runQuery(q)) { - assertThat(resp.columns(), hasSize(2)); - int rows = 0; - Iterator> values = resp.values(); - while (values.hasNext()) { - values.next(); - ++rows; - } - assertThat(rows, equalTo(limit)); + final int allProfiles; + { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM logs*,*:logs* | stats total = sum(v)"); + request.profile(true); + try (EsqlQueryResponse resp = runQuery(request)) { + List> 
values = getValuesList(resp); + assertThat(values.get(0), equalTo(List.of(330L))); + assertNotNull(resp.profile()); + List drivers = resp.profile().drivers(); + assertThat(drivers.size(), greaterThanOrEqualTo(4)); // two coordinators and at least two data + allProfiles = drivers.size(); } } + assertThat(allProfiles, equalTo(localOnlyProfiles + remoteOnlyProfiles - 1)); + } + + public void testWarnings() throws Exception { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM logs*,*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); + PlainActionFuture future = new PlainActionFuture<>(); + InternalTestCluster cluster = cluster(LOCAL_CLUSTER); + String node = randomFrom(cluster.getNodeNames()); + CountDownLatch latch = new CountDownLatch(1); + cluster.client(node).execute(EsqlQueryAction.INSTANCE, request, ActionListener.wrap(resp -> { + TransportService ts = cluster.getInstance(TransportService.class, node); + Map> responseHeaders = ts.getThreadPool().getThreadContext().getResponseHeaders(); + List warnings = responseHeaders.getOrDefault("Warning", List.of()) + .stream() + .filter(w -> w.contains("is not an IP string literal")) + .toList(); + assertThat(warnings.size(), greaterThanOrEqualTo(20)); + List> values = getValuesList(resp); + assertThat(values.get(0).get(0), equalTo(330L)); + assertNull(values.get(0).get(1)); + latch.countDown(); + }, e -> { + latch.countDown(); + throw new AssertionError(e); + })); + assertTrue(latch.await(30, TimeUnit.SECONDS)); } protected EsqlQueryResponse runQuery(String query) { - logger.info("--> query [{}]", query); EsqlQueryRequest request = new EsqlQueryRequest(); request.query(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); + return runQuery(request); + } + + protected EsqlQueryResponse runQuery(EsqlQueryRequest request) { return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index e499d3b783bb8..e249504f7e2a1 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -12,45 +12,26 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.DriverStatus; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; -import org.elasticsearch.index.engine.SegmentsStats; -import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.logging.LogManager; import 
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index e499d3b783bb8..e249504f7e2a1 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -12,45 +12,26 @@
 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
-import org.elasticsearch.action.bulk.BulkRequestBuilder;
-import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.compute.lucene.LuceneSourceOperator;
 import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
 import org.elasticsearch.compute.operator.DriverStatus;
 import org.elasticsearch.compute.operator.DriverTaskRunner;
 import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator;
 import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator;
-import org.elasticsearch.index.engine.SegmentsStats;
-import org.elasticsearch.index.mapper.OnScriptError;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.plugins.ScriptPlugin;
-import org.elasticsearch.script.LongFieldScript;
-import org.elasticsearch.script.ScriptContext;
-import org.elasticsearch.script.ScriptEngine;
-import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.tasks.TaskInfo;
 import org.elasticsearch.test.junit.annotations.TestLogging;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
 import org.junit.Before;
 
-import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.TimeUnit;
 
 import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.test.MapMatcher.matchesMap;
@@ -71,83 +52,34 @@
     value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE",
     reason = "These tests were failing frequently, let's learn as much as we can"
 )
-public class EsqlActionTaskIT extends AbstractEsqlIntegTestCase {
-    private static int PAGE_SIZE;
-    private static int NUM_DOCS;
+public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
 
-    private static String READ_DESCRIPTION;
-    private static String MERGE_DESCRIPTION;
     private static final Logger LOGGER = LogManager.getLogger(EsqlActionTaskIT.class);
 
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return CollectionUtils.appendToCopy(super.nodePlugins(), PausableFieldPlugin.class);
-    }
+    private String READ_DESCRIPTION;
+    private String MERGE_DESCRIPTION;
 
     @Before
-    public void setupIndex() throws IOException {
+    public void setup() {
        assumeTrue("requires query pragmas", canUseQueryPragmas());
-        PAGE_SIZE = between(10, 100);
-        NUM_DOCS = between(4 * PAGE_SIZE, 5 * PAGE_SIZE);
        READ_DESCRIPTION = """
-            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = PAGE_SIZE, limit = 2147483647]
+            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647]
             \\_ValuesSourceReaderOperator[fields = [pause_me]]
             \\_AggregationOperator[mode = INITIAL, aggs = sum of longs]
-            \\_ExchangeSinkOperator""".replace("PAGE_SIZE", Integer.toString(PAGE_SIZE));
+            \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize()));
        MERGE_DESCRIPTION = """
            \\_ExchangeSourceOperator[]
            \\_AggregationOperator[mode = FINAL, aggs = sum of longs]
            \\_ProjectOperator[projection = [0]]
            \\_LimitOperator[limit = 500]
            \\_OutputOperator[columns = [sum(pause_me)]]""";
-
-        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
-        mapping.startObject("runtime");
-        {
-            mapping.startObject("pause_me");
-            {
-                mapping.field("type", "long");
-                mapping.startObject("script").field("source", "").field("lang", "pause").endObject();
-            }
-            mapping.endObject();
-        }
-        mapping.endObject();
-        client().admin()
-            .indices()
-            .prepareCreate("test")
-            .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0))
-            .setMapping(mapping.endObject())
-            .get();
-
-        BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-        for (int i = 0; i < NUM_DOCS; i++) {
-            bulk.add(prepareIndex("test").setId(Integer.toString(i)).setSource("foo", i));
-        }
-        bulk.get();
-        /*
-         * forceMerge so we can be sure that we don't bump into tiny
-         * segments that finish super quickly and cause us to report strange
-         * statuses when we expect "starting".
-         */
-        client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get();
-        /*
-         * Double super extra paranoid check that force merge worked. It's
-         * failed to reduce the index to a single segment and caused this test
-         * to fail in very difficult to debug ways. If it fails again, it'll
-         * trip here. Or maybe it won't! And we'll learn something. Maybe
-         * it's ghosts.
-         */
-        SegmentsStats stats = client().admin().indices().prepareStats("test").get().getPrimaries().getSegments();
-        if (stats.getCount() != 1L) {
-            fail(Strings.toString(stats));
-        }
     }
 
     public void testTaskContents() throws Exception {
        ActionFuture<EsqlQueryResponse> response = startEsql();
        try {
            getTasksStarting();
-            scriptPermits.release(PAGE_SIZE);
+            scriptPermits.release(pageSize());
            List<TaskInfo> foundTasks = getTasksRunning();
            int luceneSources = 0;
            int valuesSourceReaders = 0;
@@ -158,9 +90,11 @@
                 assertThat(status.sessionId(), not(emptyOrNullString()));
                 for (DriverStatus.OperatorStatus o : status.activeOperators()) {
                     logger.info("status {}", o);
-                    if (o.operator().startsWith("LuceneSourceOperator[maxPageSize=" + PAGE_SIZE)) {
+                    if (o.operator().startsWith("LuceneSourceOperator[maxPageSize=" + pageSize())) {
                         LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status();
                         assertThat(oStatus.processedSlices(), lessThanOrEqualTo(oStatus.totalSlices()));
+                        assertThat(oStatus.processedQueries(), equalTo(Set.of("*:*")));
+                        assertThat(oStatus.processedShards(), equalTo(Set.of("test:0")));
                         assertThat(oStatus.sliceIndex(), lessThanOrEqualTo(oStatus.totalSlices()));
                         assertThat(oStatus.sliceMin(), greaterThanOrEqualTo(0));
                         assertThat(oStatus.sliceMax(), greaterThanOrEqualTo(oStatus.sliceMin()));
@@ -204,9 +138,9 @@
             assertThat(exchangeSinks, greaterThanOrEqualTo(1));
             assertThat(exchangeSources, equalTo(1));
         } finally {
-            scriptPermits.release(NUM_DOCS);
+            scriptPermits.release(numberOfDocs());
             try (EsqlQueryResponse esqlResponse = response.get()) {
-                assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo((long) NUM_DOCS));
+                assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo((long) numberOfDocs()));
             }
         }
     }
@@ -219,7 +153,7 @@ public void testCancelRead() throws Exception {
             cancelTask(running.taskId());
             assertCancelled(response);
         } finally {
-            scriptPermits.release(NUM_DOCS);
+            scriptPermits.release(numberOfDocs());
         }
     }
 
@@ -231,7 +165,7 @@ public void testCancelMerge() throws Exception {
             cancelTask(running.taskId());
             assertCancelled(response);
         } finally {
-            scriptPermits.release(NUM_DOCS);
+            scriptPermits.release(numberOfDocs());
         }
     }
 
@@ -249,7 +183,7 @@ public void testCancelEsqlTask() throws Exception {
             cancelTask(tasks.get(0).taskId());
             assertCancelled(response);
         } finally {
-            scriptPermits.release(NUM_DOCS);
+            scriptPermits.release(numberOfDocs());
         }
     }
 
@@ -261,7 +195,7 @@ private ActionFuture<EsqlQueryResponse> startEsql() {
             // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too.
             .put("data_partitioning", "shard")
             // Limit the page size to something small so we do more than one page worth of work, so we get more status updates.
-            .put("page_size", PAGE_SIZE)
+            .put("page_size", pageSize())
             // Report the status after every action
             .put("status_interval", "0ms")
             .build()
@@ -274,7 +208,7 @@ private void cancelTask(TaskId taskId) {
         request.setWaitForCompletion(false);
         LOGGER.debug("--> cancelling task [{}] without waiting for completion", taskId);
         client().admin().cluster().execute(CancelTasksAction.INSTANCE, request).actionGet();
-        scriptPermits.release(NUM_DOCS);
+        scriptPermits.release(numberOfDocs());
         request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("test cancel");
         request.setWaitForCompletion(true);
         LOGGER.debug("--> cancelling task [{}] with waiting for completion", taskId);
@@ -367,56 +301,4 @@ private void assertCancelled(ActionFuture<EsqlQueryResponse> response) throws Ex
             )
         );
     }
-
-    private static final Semaphore scriptPermits = new Semaphore(0);
-
-    public static class PausableFieldPlugin extends Plugin implements ScriptPlugin {
-        @Override
-        public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
-            return new ScriptEngine() {
-                @Override
-                public String getType() {
-                    return "pause";
-                }
-
-                @Override
-                @SuppressWarnings("unchecked")
-                public <FactoryType> FactoryType compile(
-                    String name,
-                    String code,
-                    ScriptContext<FactoryType> context,
-                    Map<String, String> params
-                ) {
-                    return (FactoryType) new LongFieldScript.Factory() {
-                        @Override
-                        public LongFieldScript.LeafFactory newFactory(
-                            String fieldName,
-                            Map<String, Object> params,
-                            SearchLookup searchLookup,
-                            OnScriptError onScriptError
-                        ) {
-                            return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) {
-                                @Override
-                                public void execute() {
-                                    try {
-                                        assertTrue(scriptPermits.tryAcquire(1, TimeUnit.MINUTES));
-                                    } catch (Exception e) {
-                                        throw new AssertionError(e);
-                                    }
-                                    LOGGER.debug("--> emitting value");
-                                    emit(1);
-                                }
-                            };
-                        }
-                    };
-                }
-
-                @Override
-                public Set<ScriptContext<?>> getSupportedContexts() {
-                    return Set.of(LongFieldScript.CONTEXT);
-                }
-            };
-        }
-    }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java
index 3a616fc76edd2..b2685ac2c8276 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java
@@ -8,18 +8,42 @@
 package org.elasticsearch.xpack.esql.action;
 
 import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
 import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
 import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
 
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.concurrent.TimeUnit;
 
+import static org.elasticsearch.core.TimeValue.timeValueSeconds;
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.core.IsEqual.equalTo;
 
+/**
+ * Runs test scenarios from EsqlActionIT, with an extra level of indirection
+ * through the async query and async get APIs.
+ */
 public class EsqlAsyncActionIT extends EsqlActionIT {
 
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        ArrayList<Class<? extends Plugin>> actions = new ArrayList<>(super.nodePlugins());
+        actions.add(LocalStateEsqlAsync.class);
+        return Collections.unmodifiableList(actions);
+    }
+
     @Override
     protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter) {
         EsqlQueryRequest request = new EsqlQueryRequest();
@@ -35,24 +59,40 @@ protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, Query
         var response = run(request);
         if (response.asyncExecutionId().isPresent()) {
+            String id = response.asyncExecutionId().get();
             assertThat(response.isRunning(), is(true));
             assertThat(response.columns(), is(empty())); // no partial results
             assertThat(response.pages(), is(empty()));
             response.close();
-            return getAsyncResponse(response.asyncExecutionId().get());
+            var getResponse = getAsyncResponse(id);
+            assertDeletable(id);
+            return getResponse;
         } else {
             return response;
         }
     }
 
+    void assertDeletable(String id) {
+        var resp = deleteAsyncId(id);
+        assertTrue(resp.isAcknowledged());
+        // the stored response should no longer be retrievable
+        var e = expectThrows(ResourceNotFoundException.class, () -> getAsyncResponse(id));
+        assertThat(e.getMessage(), equalTo(id));
+    }
+
     EsqlQueryResponse getAsyncResponse(String id) {
         try {
-            GetAsyncResultRequest getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(
-                TimeValue.timeValueSeconds(60)
-            );
-            var resp = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS);
-            // resp.decRef(); // the client has incremented our non-0 resp
-            return resp;
+            var getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeValueSeconds(60));
+            return client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout", e);
+        }
+    }
+
+    AcknowledgedResponse deleteAsyncId(String id) {
+        try {
+            DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id);
+            return client().execute(DeleteAsyncResultAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS);
         } catch (ElasticsearchTimeoutException e) {
             throw new AssertionError("timeout", e);
         }
@@ -71,4 +111,10 @@ public void testOverlappingIndexPatterns() throws Exception {
     public void testIndexPatterns() throws Exception {
         super.testOverlappingIndexPatterns();
     }
+
+    public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin {
+        public LocalStateEsqlAsync(final Settings settings, final Path configPath) {
+            super(settings, configPath);
+        }
+    }
 }
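The async lifecycle the overridden run(...) drives, as a hedged end-to-end sketch inside the same class (getAsyncResponse and deleteAsyncId are the helpers defined above; the flow is submit, fetch the stored result, then verify deletion):

    // Sketch: consume an async response by id, then prove the stored copy is gone.
    void roundTrip(EsqlQueryResponse response) {
        if (response.asyncExecutionId().isPresent()) {
            String id = response.asyncExecutionId().get();
            response.close();
            try (EsqlQueryResponse stored = getAsyncResponse(id)) {
                // consume the stored result while it is still retained
            }
            assertTrue(deleteAsyncId(id).isAcknowledged());
            // a second get must now fail: the stored response has been deleted
            expectThrows(ResourceNotFoundException.class, () -> getAsyncResponse(id));
        }
    }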
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
index d8e4fcf01bd80..b6473e3bd03ce 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
@@ -25,12 +25,7 @@
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Releasables;
-import org.elasticsearch.xcontent.InstantiatingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ParserConstructor;
 import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentParser;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -39,35 +34,10 @@
 import java.util.Objects;
 import java.util.Optional;
 
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-import static org.elasticsearch.xpack.esql.action.ResponseValueUtils.valuesToPage;
-
 public class EsqlQueryResponse extends ActionResponse implements ChunkedToXContentObject, Releasable {
 
     private final AbstractRefCounted counted = AbstractRefCounted.of(this::closeInternal);
 
-    private static final ParseField ID = new ParseField("id");
-    private static final ParseField IS_RUNNING = new ParseField("is_running");
-    private static final InstantiatingObjectParser<EsqlQueryResponse, Void> PARSER;
-    static {
-        InstantiatingObjectParser.Builder<EsqlQueryResponse, Void> parser = InstantiatingObjectParser.builder(
-            "esql/query_response",
-            true,
-            EsqlQueryResponse.class
-        );
-        parser.declareString(optionalConstructorArg(), ID);
-        parser.declareField(
-            optionalConstructorArg(),
-            p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? false : p.booleanValue(),
-            IS_RUNNING,
-            ObjectParser.ValueType.BOOLEAN_OR_NULL
-        );
-        parser.declareObjectArray(constructorArg(), (p, c) -> ColumnInfo.fromXContent(p), new ParseField("columns"));
-        parser.declareField(constructorArg(), (p, c) -> p.list(), new ParseField("values"), ObjectParser.ValueType.OBJECT_ARRAY);
-        PARSER = parser.build();
-    }
-
     private final List<ColumnInfo> columns;
     private final List<Page> pages;
     private final Profile profile;
@@ -99,27 +69,6 @@ public EsqlQueryResponse(List<ColumnInfo> columns, List<Page> pages, @Nullable P
         this(columns, pages, profile, columnar, null, false, isAsync);
     }
 
-    // Used for XContent reconstruction
-    @ParserConstructor
-    public EsqlQueryResponse(@Nullable String asyncExecutionId, Boolean isRunning, List<ColumnInfo> columns, List<List<Object>> values) {
-        this(
-            columns,
-            List.of(valuesToPage(columns, values)),
-            null,
-            false,
-            asyncExecutionId,
-            isRunning != null,
-            isAsync(asyncExecutionId, isRunning)
-        );
-    }
-
-    static boolean isAsync(@Nullable String asyncExecutionId, Boolean isRunning) {
-        if (asyncExecutionId != null || isRunning != null) {
-            return true;
-        }
-        return false;
-    }
-
     /**
      * Build a reader for the response.
      */
@@ -229,10 +178,6 @@ public boolean isFragment() {
         return false;
     }
 
-    public static EsqlQueryResponse fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java
index 6541e945720bf..d68245aa3296e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.common.geo.SpatialPoint;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -135,10 +136,10 @@ private static SpatialPoint pointValueAt(SpatialCoordinateTypes spatial, String
      * Converts a list of values to Pages so that we can parse from xcontent. It's not
      * super efficient, but it doesn't really have to be.
      */
-    static Page valuesToPage(List<ColumnInfo> columns, List<List<Object>> values) {
+    static Page valuesToPage(BlockFactory blockFactory, List<ColumnInfo> columns, List<List<Object>> values) {
         List<String> dataTypes = columns.stream().map(ColumnInfo::type).toList();
         List<Block.Builder> results = dataTypes.stream()
-            .map(c -> PlannerUtils.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size()))
+            .map(c -> PlannerUtils.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size(), blockFactory))
             .toList();
 
         for (List<Object> row : values) {
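The builder-per-column idiom valuesToPage(...) now follows, shown in isolation. This is a hedged sketch assuming the caller owns a BlockFactory; the helper name and the fixed int column type are made up for illustration:

    // Sketch: every Block.Builder is created through the caller's BlockFactory,
    // so the bytes it allocates are accounted to that factory's circuit breaker.
    static Block intsUpTo(BlockFactory blockFactory, int count) {
        try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(count)) {
            for (int i = 0; i < count; i++) {
                builder.appendInt(i);
            }
            return builder.build(); // the tracked memory now belongs to the returned block
        }
    }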
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestHandler;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
+
+import java.util.List;
+
+import static org.elasticsearch.rest.RestRequest.Method.DELETE;
+
+public class RestEsqlDeleteAsyncResultAction extends BaseRestHandler {
+    @Override
+    public List<RestHandler.Route> routes() {
+        return List.of(new RestHandler.Route(DELETE, "/_query/async/{id}"));
+    }
+
+    @Override
+    public String getName() {
+        return "esql_delete_async_result";
+    }
+
+    @Override
+    protected BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
+        DeleteAsyncResultRequest delete = new DeleteAsyncResultRequest(request.param("id"));
+        return channel -> client.execute(DeleteAsyncResultAction.INSTANCE, delete, new RestToXContentListener<>(channel));
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java
index 280ef898c3b90..54c9fec4da96a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java
@@ -105,7 +105,7 @@ public Block eval(Page page) {
          */
         private Block eval(Block lhs, Block rhs) {
             int positionCount = lhs.getPositionCount();
-            try (BooleanBlock.Builder result = BooleanBlock.newBlockBuilder(positionCount, lhs.blockFactory())) {
+            try (BooleanBlock.Builder result = lhs.blockFactory().newBooleanBlockBuilder(positionCount)) {
                 for (int p = 0; p < positionCount; p++) {
                     if (lhs.getValueCount(p) > 1) {
                         result.appendNull();
@@ -132,7 +132,7 @@ private Block eval(Block lhs, Block rhs) {
 
         private Block eval(BooleanVector lhs, BooleanVector rhs) {
             int positionCount = lhs.getPositionCount();
-            try (var result = BooleanVector.newVectorFixedBuilder(positionCount, lhs.blockFactory())) {
+            try (var result = lhs.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
                 for (int p = 0; p < positionCount; p++) {
                     result.appendBoolean(bl.function().apply(lhs.getBoolean(p), rhs.getBoolean(p)));
                 }
@@ -225,12 +225,12 @@ public String toString() {
         private static Block block(Literal lit, BlockFactory blockFactory, int positions) {
             var value = lit.value();
             if (value == null) {
-                return Block.constantNullBlock(positions, blockFactory);
+                return blockFactory.newConstantNullBlock(positions);
             }
 
             if (value instanceof List<?> multiValue) {
                 if (multiValue.isEmpty()) {
-                    return Block.constantNullBlock(positions, blockFactory);
+                    return blockFactory.newConstantNullBlock(positions);
                 }
                 var wrapper = BlockUtils.wrapperFor(blockFactory, ElementType.fromJava(multiValue.get(0).getClass()), positions);
                 for (int i = 0; i < positions; i++) {
@@ -267,14 +267,9 @@ record IsNullEvaluator(DriverContext driverContext, EvalOperator.ExpressionEvalu
         public Block eval(Page page) {
             try (Block fieldBlock = field.eval(page)) {
                 if (fieldBlock.asVector() != null) {
-                    return BooleanBlock.newConstantBlockWith(false, page.getPositionCount(), driverContext.blockFactory());
+                    return driverContext.blockFactory().newConstantBooleanBlockWith(false, page.getPositionCount());
                 }
-                try (
-                    BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(
-                        page.getPositionCount(),
-                        driverContext.blockFactory()
-                    )
-                ) {
+                try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) {
                     for (int p = 0; p < page.getPositionCount(); p++) {
                         builder.appendBoolean(fieldBlock.isNull(p));
                     }
@@ -321,14 +316,9 @@ record IsNotNullEvaluator(DriverContext driverContext, EvalOperator.ExpressionEv
         public Block eval(Page page) {
             try (Block fieldBlock = field.eval(page)) {
                 if (fieldBlock.asVector() != null) {
-                    return BooleanBlock.newConstantBlockWith(true, page.getPositionCount(), driverContext.blockFactory());
+                    return driverContext.blockFactory().newConstantBooleanBlockWith(true, page.getPositionCount());
                 }
-                try (
-                    BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(
-                        page.getPositionCount(),
-                        driverContext.blockFactory()
-                    )
-                ) {
+                try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) {
                     for (int p = 0; p < page.getPositionCount(); p++) {
                         builder.appendBoolean(fieldBlock.isNull(p) == false);
                     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java
index 9f5c492d7fe7c..5df0ac03206c4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java
@@ -123,7 +123,7 @@ private static class Evaluator implements ExpressionEvaluator {
         public final Block eval(Page page) {
             try (BytesRefBlock fieldVal = (BytesRefBlock) field.eval(page); BytesRefBlock delimVal = (BytesRefBlock) delim.eval(page)) {
                 int positionCount = page.getPositionCount();
-                try (BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, context.blockFactory())) {
+                try (BytesRefBlock.Builder builder = context.blockFactory().newBytesRefBlockBuilder(positionCount)) {
                     BytesRefBuilder work = new BytesRefBuilder(); // TODO BreakingBytesRefBuilder so we don't blow past circuit breakers
                     BytesRef fieldScratch = new BytesRef();
                     BytesRef delimScratch = new BytesRef();
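The shape of the change applied across EvalMapper, MvConcat, and the other evaluators, reduced to one hedged example (the helper is illustrative; only builder calls that appear elsewhere in this PR are used):

    // Before: BooleanVector.newVectorFixedBuilder(positions, blockFactory)
    // After:  blockFactory.newBooleanVectorFixedBuilder(positions)
    // Routing construction through the factory instance means the breaker-tracking
    // BlockFactory can no longer be forgotten at a call site.
    static BooleanVector allTrue(BlockFactory blockFactory, int positions) {
        try (var builder = blockFactory.newBooleanVectorFixedBuilder(positions)) {
            for (int p = 0; p < positions; p++) {
                builder.appendBoolean(true);
            }
            return builder.build();
        }
    }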
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java
index bf05aeee4d228..b7d9a3a73929e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java
@@ -8,8 +8,6 @@
 package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue;
 
 import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.IntBlock;
-import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.EvalOperator;
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
@@ -108,7 +106,7 @@ protected String name() {
 
         @Override
         protected Block evalNullable(Block block) {
-            try (var builder = IntBlock.newBlockBuilder(block.getPositionCount(), driverContext.blockFactory())) {
+            try (var builder = driverContext.blockFactory().newIntBlockBuilder(block.getPositionCount())) {
                 for (int p = 0; p < block.getPositionCount(); p++) {
                     int valueCount = block.getValueCount(p);
                     if (valueCount == 0) {
@@ -123,7 +121,7 @@ protected Block evalNullable(Block block) {
 
         @Override
         protected Block evalNotNullable(Block block) {
-            try (var builder = IntVector.newVectorFixedBuilder(block.getPositionCount(), driverContext.blockFactory())) {
+            try (var builder = driverContext.blockFactory().newIntVectorFixedBuilder(block.getPositionCount())) {
                 for (int p = 0; p < block.getPositionCount(); p++) {
                     builder.appendInt(block.getValueCount(p));
                 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java
index a0abced909c48..48b83aa205549 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java
@@ -45,7 +45,7 @@ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fiel
         case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG
             ? new MvSumUnsignedLongEvaluator.Factory(source(), fieldEval)
             : new MvSumLongEvaluator.Factory(source(), fieldEval);
-        case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL;
+        case NULL -> EvalOperator.CONSTANT_NULL_FACTORY;
         default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType());
     };
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java
index e05dd9a00c567..1f833a9254ab8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java
@@ -7,15 +7,24 @@
 
 package org.elasticsearch.xpack.esql.optimizer;
 
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.BlockUtils;
+import org.elasticsearch.xpack.esql.expression.function.aggregate.Count;
 import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;
+import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.PropagateEmptyRelation;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
 import org.elasticsearch.xpack.esql.plan.logical.TopN;
+import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders;
+import org.elasticsearch.xpack.esql.planner.PlannerUtils;
 import org.elasticsearch.xpack.esql.stats.SearchStats;
 import org.elasticsearch.xpack.ql.expression.Alias;
+import org.elasticsearch.xpack.ql.expression.Attribute;
 import org.elasticsearch.xpack.ql.expression.Expression;
 import org.elasticsearch.xpack.ql.expression.FieldAttribute;
 import org.elasticsearch.xpack.ql.expression.Literal;
 import org.elasticsearch.xpack.ql.expression.NamedExpression;
+import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.ql.optimizer.OptimizerRules;
 import org.elasticsearch.xpack.ql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.ql.plan.logical.EsRelation;
@@ -25,10 +34,15 @@
 import org.elasticsearch.xpack.ql.plan.logical.Project;
 import org.elasticsearch.xpack.ql.rule.ParameterizedRule;
 import org.elasticsearch.xpack.ql.rule.ParameterizedRuleExecutor;
+import org.elasticsearch.xpack.ql.type.DataType;
+import org.elasticsearch.xpack.ql.type.DataTypes;
 
 import java.util.ArrayList;
 import java.util.List;
 
+import static java.util.Arrays.asList;
+import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.cleanup;
+import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.operators;
 import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP;
 
 public class LocalLogicalPlanOptimizer extends ParameterizedRuleExecutor<LogicalPlan, LocalLogicalOptimizerContext> {
@@ -50,10 +64,23 @@ protected List<Batch<LogicalPlan>> batches() {
         var rules = new ArrayList<Batch<LogicalPlan>>();
         rules.add(local);
         // TODO: if the local rules haven't touched the tree, the rest of the rules can be skipped
-        rules.addAll(LogicalPlanOptimizer.rules());
+        rules.addAll(asList(operators(), cleanup()));
+        replaceRules(rules);
         return rules;
     }
 
+    private List<Batch<LogicalPlan>> replaceRules(List<Batch<LogicalPlan>> listOfRules) {
+        for (Batch<LogicalPlan> batch : listOfRules) {
+            var rules = batch.rules();
+            for (int i = 0; i < rules.length; i++) {
+                if (rules[i] instanceof PropagateEmptyRelation) {
+                    rules[i] = new LocalPropagateEmptyRelation();
+                }
+            }
+        }
+        return listOfRules;
+    }
+
     public LogicalPlan localOptimize(LogicalPlan plan) {
         return execute(plan);
     }
@@ -132,6 +159,32 @@ protected boolean skipExpression(Expression e) {
         }
     }
 
+    /**
+     * Local aggregation can only produce intermediate state that get wired into the global agg.
+     */
+    private static class LocalPropagateEmptyRelation extends PropagateEmptyRelation {
+
+        /**
+         * Local variant of the aggregation that returns the intermediate value.
+         */
+        @Override
+        protected void aggOutput(NamedExpression agg, AggregateFunction aggFunc, BlockFactory blockFactory, List<Block> blocks) {
+            List<Attribute> output = AbstractPhysicalOperationProviders.intermediateAttributes(List.of(agg), List.of());
+            for (Attribute o : output) {
+                DataType dataType = o.dataType();
+                // boolean right now is used for the internal #seen so always return true
+                var value = dataType == DataTypes.BOOLEAN ? true
+                    // look for count(literal) with literal != null
+                    : aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 0L
+                    // otherwise nullify
+                    : null;
+                var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(dataType), 1);
+                wrapper.accept(value);
+                blocks.add(wrapper.builder().build());
+            }
+        }
+    }
+
     abstract static class ParameterizedOptimizerRule<SubPlan extends LogicalPlan, P> extends ParameterizedRule<SubPlan, LogicalPlan, P> {
 
         public final LogicalPlan apply(LogicalPlan plan, P context) {
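What an aggOutput(...) implementation produces per intermediate attribute, as a hedged sketch of the one-position block construction (the helper name is made up; it assumes, as the code above relies on, that accept(null) appends a null position):

    // Sketch: one position, one value; the ElementType selects the right builder.
    static Block singleValue(BlockFactory blockFactory, ElementType type, Object value) {
        var wrapper = BlockUtils.wrapperFor(blockFactory, type, 1);
        wrapper.accept(value); // e.g. true for the #seen flag, 0L for count(...), null otherwise
        return wrapper.builder().build();
    }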
+ @SuppressWarnings("removal") static class PropagateEmptyRelation extends OptimizerRules.OptimizerRule { @Override @@ -650,29 +654,14 @@ protected LogicalPlan rule(UnaryPlan plan) { return p; } - private static List aggsFromEmpty(List aggs) { - // TODO: Should we introduce skip operator that just never queries the source + private List aggsFromEmpty(List aggs) { List blocks = new ArrayList<>(); - var blockFactory = BlockFactory.getNonBreakingInstance(); + var blockFactory = PlannerUtils.NON_BREAKING_BLOCK_FACTORY; int i = 0; for (var agg : aggs) { // there needs to be an alias if (agg instanceof Alias a && a.child() instanceof AggregateFunction aggFunc) { - List output = AbstractPhysicalOperationProviders.intermediateAttributes(List.of(agg), List.of()); - for (Attribute o : output) { - DataType dataType = o.dataType(); - // fill the boolean block later in LocalExecutionPlanner - if (dataType != DataTypes.BOOLEAN) { - // look for count(literal) with literal != null - var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(dataType), 1); - if (aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null)) { - wrapper.accept(0L); - } else { - wrapper.accept(null); - } - blocks.add(wrapper.builder().build()); - } - } + aggOutput(agg, aggFunc, blockFactory, blocks); } else { throw new EsqlIllegalArgumentException("Did not expect a non-aliased aggregation {}", agg); } @@ -680,6 +669,16 @@ private static List aggsFromEmpty(List aggs) { return blocks; } + /** + * The folded aggregation output - this variant is for the coordinator/final. + */ + protected void aggOutput(NamedExpression agg, AggregateFunction aggFunc, BlockFactory blockFactory, List blocks) { + // look for count(literal) with literal != null + Object value = aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 
0L : null; + var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(aggFunc.dataType()), 1); + wrapper.accept(value); + blocks.add(wrapper.builder().build()); + } } private static LogicalPlan skipPlan(UnaryPlan plan) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 6dcfa782c55f5..88d0db69234c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -56,7 +56,6 @@ import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -88,7 +87,6 @@ import org.elasticsearch.xpack.ql.expression.NameId; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; -import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Holder; import java.util.ArrayList; @@ -323,29 +321,6 @@ private PhysicalOperation planExchange(ExchangeExec exchangeExec, LocalExecution private PhysicalOperation planExchangeSink(ExchangeSinkExec exchangeSink, LocalExecutionPlannerContext context) { Objects.requireNonNull(exchangeSinkHandler, "ExchangeSinkHandler wasn't provided"); var child = exchangeSink.child(); - // see https://github.com/elastic/elasticsearch/issues/100807 - handle case where the plan has been fully minimized - // to a local relation and the aggregate intermediate data erased. 
For this scenario, match the output the exchange output - // with that of the local relation - - if (child instanceof LocalSourceExec localExec) { - var output = exchangeSink.output(); - var localOutput = localExec.output(); - if (output.equals(localOutput) == false) { - // the outputs are going to be similar except for the bool "seen" flags which are added in below - List blocks = new ArrayList<>(asList(localExec.supplier().get())); - if (blocks.size() > 0) { - for (int i = 0, s = output.size(); i < s; i++) { - var out = output.get(i); - if (out.dataType() == DataTypes.BOOLEAN) { - blocks.add(i, BlockFactory.getNonBreakingInstance().newConstantBooleanBlockWith(true, 1)); - } - } - } - var newSupplier = LocalSupplier.of(blocks.toArray(Block[]::new)); - - child = new LocalSourceExec(localExec.source(), output, newSupplier); - } - } PhysicalOperation source = plan(child, context); @@ -814,9 +789,7 @@ public List createDrivers(String sessionId) { @Override public String describe() { - StringBuilder sb = new StringBuilder(); - sb.append(driverFactories.stream().map(DriverFactory::describe).collect(joining("\n"))); - return sb.toString(); + return driverFactories.stream().map(DriverFactory::describe).collect(joining("\n")); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index ff0ac935802e8..74b306e7a2296 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.esql.planner; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; @@ -17,6 +20,8 @@ import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -65,6 +70,19 @@ public static Tuple breakPlanBetweenCoordinatorAndDa return new Tuple<>(coordinatorPlan, dataNodePlan.get()); } + public static boolean hasEnrich(PhysicalPlan plan) { + boolean[] found = { false }; + plan.forEachDown(p -> { + if (p instanceof EnrichExec) { + found[0] = true; + } + if (p instanceof FragmentExec f) { + f.fragment().forEachDown(Enrich.class, e -> found[0] = true); + } + }); + return found[0]; + } + /** * Returns a set of concrete indices after resolving the original indices specified in the FROM command. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java
index ff0ac935802e8..74b306e7a2296 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java
@@ -8,6 +8,9 @@
 package org.elasticsearch.xpack.esql.planner;
 
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.breaker.NoopCircuitBreaker;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -17,6 +20,8 @@
 import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext;
 import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer;
+import org.elasticsearch.xpack.esql.plan.logical.Enrich;
+import org.elasticsearch.xpack.esql.plan.physical.EnrichExec;
 import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec;
 import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize;
@@ -65,6 +70,19 @@ public static Tuple<PhysicalPlan, PhysicalPlan> breakPlanBetweenCoordinatorAndDa
         return new Tuple<>(coordinatorPlan, dataNodePlan.get());
     }
 
+    public static boolean hasEnrich(PhysicalPlan plan) {
+        boolean[] found = { false };
+        plan.forEachDown(p -> {
+            if (p instanceof EnrichExec) {
+                found[0] = true;
+            }
+            if (p instanceof FragmentExec f) {
+                f.fragment().forEachDown(Enrich.class, e -> found[0] = true);
+            }
+        });
+        return found[0];
+    }
+
     /**
      * Returns a set of concrete indices after resolving the original indices specified in the FROM command.
      */
@@ -225,4 +243,14 @@ public static ElementType toElementType(DataType dataType) {
         }
         throw EsqlIllegalArgumentException.illegalDataType(dataType);
     }
+
+    /**
+     * A non-breaking block factory used to create small pages during the planning
+     * TODO: Remove this
+     */
+    @Deprecated(forRemoval = true)
+    public static final BlockFactory NON_BREAKING_BLOCK_FACTORY = BlockFactory.getInstance(
+        new NoopCircuitBreaker("noop-esql-breaker"),
+        BigArrays.NON_RECYCLING_INSTANCE
+    );
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java
new file mode 100644
index 0000000000000..e25136f4d9532
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plugin;
+
+import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.compute.operator.exchange.ExchangeService;
+import org.elasticsearch.tasks.CancellableTask;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
+import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
+import org.elasticsearch.xpack.esql.session.EsqlConfiguration;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A request to initiate a compute on a remote cluster. The output pages of the compute on the remote cluster will be placed in an
+ * exchange sink specified by the {@code sessionId}. The exchange sink associated with this {@code sessionId} should have been opened
+ * via {@link ExchangeService#openExchange} before sending this request to the remote cluster. The coordinator on the main cluster
+ * will poll pages from this sink. Internally, this compute will trigger sub-computes on data nodes via {@link DataNodeRequest}.
+ */
+final class ClusterComputeRequest extends TransportRequest implements IndicesRequest {
+    private static final PlanNameRegistry planNameRegistry = new PlanNameRegistry();
+    private final String clusterAlias;
+    private final String sessionId;
+    private final EsqlConfiguration configuration;
+    private final PhysicalPlan plan;
+
+    private final String[] originalIndices;
+    private final String[] indices;
+
+    /**
+     * A request to start a compute on a remote cluster.
+     *
+     * @param clusterAlias the cluster alias of this remote cluster
+     * @param sessionId the sessionId in which the output pages will be placed in the exchange sink specified by this id
+     * @param configuration the configuration for this compute
+     * @param plan the physical plan to be executed
+     * @param indices the target indices
+     * @param originalIndices the original indices - needed to resolve alias filters
+     */
+    ClusterComputeRequest(
+        String clusterAlias,
+        String sessionId,
+        EsqlConfiguration configuration,
+        PhysicalPlan plan,
+        String[] indices,
+        String[] originalIndices
+    ) {
+        this.clusterAlias = clusterAlias;
+        this.sessionId = sessionId;
+        this.configuration = configuration;
+        this.plan = plan;
+        this.indices = indices;
+        this.originalIndices = originalIndices;
+    }
+
+    ClusterComputeRequest(StreamInput in) throws IOException {
+        super(in);
+        this.clusterAlias = in.readString();
+        this.sessionId = in.readString();
+        this.configuration = new EsqlConfiguration(in);
+        this.plan = new PlanStreamInput(in, planNameRegistry, in.namedWriteableRegistry(), configuration).readPhysicalPlanNode();
+        this.indices = in.readStringArray();
+        this.originalIndices = in.readStringArray();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(clusterAlias);
+        out.writeString(sessionId);
+        configuration.writeTo(out);
+        new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan);
+        out.writeStringArray(indices);
+        out.writeStringArray(originalIndices);
+    }
+
+    @Override
+    public String[] indices() {
+        return indices;
+    }
+
+    @Override
+    public IndicesOptions indicesOptions() {
+        return IndicesOptions.strictSingleIndexNoExpandForbidClosed();
+    }
+
+    @Override
+    public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
+        if (parentTaskId.isSet() == false) {
+            assert false : "DataNodeRequest must have a parent task";
+            throw new IllegalStateException("DataNodeRequest must have a parent task");
+        }
+        return new CancellableTask(id, type, action, "", parentTaskId, headers) {
+            @Override
+            public String getDescription() {
+                return ClusterComputeRequest.this.getDescription();
+            }
+        };
+    }
+
+    String clusterAlias() {
+        return clusterAlias;
+    }
+
+    String sessionId() {
+        return sessionId;
+    }
+
+    EsqlConfiguration configuration() {
+        return configuration;
+    }
+
+    String[] originalIndices() {
+        return originalIndices;
+    }
+
+    PhysicalPlan plan() {
+        return plan;
+    }
+
+    @Override
+    public String getDescription() {
+        return "indices=" + Arrays.toString(indices) + " plan=" + plan;
+    }
+
+    @Override
+    public String toString() {
+        return "ClusterComputeRequest{" + getDescription() + "}";
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        ClusterComputeRequest request = (ClusterComputeRequest) o;
+        return clusterAlias.equals(request.clusterAlias)
+            && sessionId.equals(request.sessionId)
+            && configuration.equals(request.configuration)
+            && Arrays.equals(indices, request.indices)
+            && Arrays.equals(originalIndices, request.originalIndices)
+            && plan.equals(request.plan)
+            && getParentTask().equals(request.getParentTask());
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(sessionId, configuration, Arrays.hashCode(indices), Arrays.hashCode(originalIndices), plan);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java
new file mode 100644
index 0000000000000..44796ca78aa91
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plugin;
+
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.compute.operator.DriverProfile;
+import org.elasticsearch.transport.TransportResponse;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * The compute result of {@link DataNodeRequest} or {@link ClusterComputeRequest}
+ */
+final class ComputeResponse extends TransportResponse {
+    private final List<DriverProfile> profiles;
+
+    ComputeResponse(List<DriverProfile> profiles) {
+        this.profiles = profiles;
+    }
+
+    ComputeResponse(StreamInput in) throws IOException {
+        super(in);
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) {
+            if (in.readBoolean()) {
+                profiles = in.readCollectionAsImmutableList(DriverProfile::new);
+            } else {
+                profiles = null;
+            }
+        } else {
+            profiles = null;
+        }
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) {
+            if (profiles == null) {
+                out.writeBoolean(false);
+            } else {
+                out.writeBoolean(true);
+                out.writeCollection(profiles);
+            }
+        }
+    }
+
+    public List<DriverProfile> getProfiles() {
+        return profiles;
+    }
+}
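Putting the ClusterComputeRequest contract into code: a hedged sketch of the handshake its javadoc describes, as a method that might sit inside ComputeService (all parameters are assumed to be supplied by the surrounding service; only calls that appear elsewhere in this PR are used, and CLUSTER_ACTION_NAME is the constant registered above):

    // Sketch: open the exchange for the session first, link the remote sink, then start the compute.
    static void startRemoteCompute(
        TransportService transportService,
        ExchangeService exchangeService,
        ExchangeSourceHandler exchangeSource,
        Transport.Connection connection,
        String sessionId,
        QueryPragmas queryPragmas,
        Executor esqlExecutor,
        CancellableTask parentTask,
        ClusterComputeRequest request,
        ActionListener<ComputeResponse> listener
    ) {
        // 1) open an exchange sink for this sessionId on the remote side
        ExchangeService.openExchange(
            transportService,
            connection,
            sessionId,
            queryPragmas.exchangeBufferSize(),
            esqlExecutor,
            listener.delegateFailureAndWrap((delegate, unused) -> {
                // 2) link the remote sink so its pages flow into the local source handler
                var remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, connection);
                exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients());
                // 3) only now start the compute that fills that sink
                transportService.sendChildRequest(
                    connection,
                    CLUSTER_ACTION_NAME,
                    request,
                    parentTask,
                    TransportRequestOptions.EMPTY,
                    new ActionListenerResponseHandler<>(delegate, ComputeResponse::new, esqlExecutor)
                );
            })
        );
    }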
org.elasticsearch.core.Releasables; @@ -57,13 +56,16 @@ import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; +import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportRequestOptions; -import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; @@ -71,9 +73,7 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; -import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -120,6 +120,12 @@ public ComputeService( this.blockFactory = blockFactory; this.esqlExecutor = threadPool.executor(ESQL_THREAD_POOL_NAME); transportService.registerRequestHandler(DATA_ACTION_NAME, this.esqlExecutor, DataNodeRequest::new, new DataNodeRequestHandler()); + transportService.registerRequestHandler( + CLUSTER_ACTION_NAME, + this.esqlExecutor, + ClusterComputeRequest::new, + new ClusterRequestHandler() + ); this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor); this.exchangeService = exchangeService; this.enrichLookupService = enrichLookupService; @@ -144,12 +150,14 @@ public void execute( }); PhysicalPlan coordinatorPlan = new OutputExec(coordinatorAndDataNodePlan.v1(), collectedPages::add); PhysicalPlan dataNodePlan = coordinatorAndDataNodePlan.v2(); - - var concreteIndices = PlannerUtils.planConcreteIndices(physicalPlan); - + if (dataNodePlan != null && dataNodePlan instanceof ExchangeSinkExec == false) { + listener.onFailure(new IllegalStateException("expect data node plan starts with an ExchangeSink; got " + dataNodePlan)); + return; + } + Map clusterToConcreteIndices = transportService.getRemoteClusterService() + .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planConcreteIndices(physicalPlan).toArray(String[]::new)); QueryPragmas queryPragmas = configuration.pragmas(); - - if (concreteIndices.isEmpty()) { + if (dataNodePlan == null || clusterToConcreteIndices.values().stream().allMatch(v -> v.indices().length == 0)) { var computeContext = new ComputeContext(sessionId, List.of(), configuration, null, null); runCompute( rootTask, @@ -159,108 +167,204 @@ public void execute( ); return; } - QueryBuilder requestFilter = PlannerUtils.requestFilter(dataNodePlan); - - LOGGER.debug("Sending data node plan\n{}\n with filter [{}]", dataNodePlan, requestFilter); - + Map clusterToOriginalIndices = transportService.getRemoteClusterService() + .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planOriginalIndices(physicalPlan)); + var localOriginalIndices = clusterToOriginalIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + var localConcreteIndices = 
clusterToConcreteIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + if (clusterToOriginalIndices.isEmpty() == false && PlannerUtils.hasEnrich(physicalPlan)) { + listener.onFailure(new IllegalArgumentException("cross clusters query doesn't support enrich yet")); + return; + } final var responseHeadersCollector = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); - String[] originalIndices = PlannerUtils.planOriginalIndices(physicalPlan); - computeTargetNodes( - rootTask, - requestFilter, - concreteIndices, - originalIndices, - listener.delegateFailureAndWrap((delegate, targetNodes) -> { - final ExchangeSourceHandler exchangeSource = exchangeService.createSourceHandler( + final AtomicBoolean cancelled = new AtomicBoolean(); + final List collectedProfiles = configuration.profile() ? Collections.synchronizedList(new ArrayList<>()) : List.of(); + final var exchangeSource = new ExchangeSourceHandler( + queryPragmas.exchangeBufferSize(), + transportService.getThreadPool().executor(ESQL_THREAD_POOL_NAME) + ); + try ( + Releasable ignored = exchangeSource::decRef; + RefCountingListener refs = new RefCountingListener(listener.map(unused -> new Result(collectedPages, collectedProfiles))) + ) { + // wait until the source handler is completed + exchangeSource.addCompletionListener(refs.acquire()); + // run compute on the coordinator + runCompute( + rootTask, + new ComputeContext(sessionId, List.of(), configuration, exchangeSource, null), + coordinatorPlan, + cancelOnFailure(rootTask, cancelled, refs.acquire()).map(driverProfiles -> { + responseHeadersCollector.collect(); + if (configuration.profile()) { + collectedProfiles.addAll(driverProfiles); + } + return null; + }) + ); + // starts computes on data nodes on the main cluster + if (localConcreteIndices != null && localConcreteIndices.indices().length > 0) { + startComputeOnDataNodes( sessionId, - queryPragmas.exchangeBufferSize(), - ESQL_THREAD_POOL_NAME + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + rootTask, + configuration, + dataNodePlan, + Set.of(localConcreteIndices.indices()), + localOriginalIndices.indices(), + exchangeSource, + () -> cancelOnFailure(rootTask, cancelled, refs.acquire()).map(response -> { + responseHeadersCollector.collect(); + if (configuration.profile()) { + collectedProfiles.addAll(response.getProfiles()); + } + return null; + }) ); - final List collectedProfiles = configuration.profile() - ? 
Collections.synchronizedList(new ArrayList<>()) - : null; - try ( - Releasable ignored = exchangeSource::decRef; - RefCountingListener requestRefs = new RefCountingListener( - delegate.map(unused -> new Result(collectedPages, collectedProfiles)) - ) - ) { - final AtomicBoolean cancelled = new AtomicBoolean(); - // wait until the source handler is completed - exchangeSource.addCompletionListener(requestRefs.acquire()); - // run compute on the coordinator - var computeContext = new ComputeContext(sessionId, List.of(), configuration, exchangeSource, null); - runCompute( - rootTask, - computeContext, - coordinatorPlan, - cancelOnFailure(rootTask, cancelled, requestRefs.acquire()).map(driverProfiles -> { - responseHeadersCollector.collect(); - if (configuration.profile()) { - collectedProfiles.addAll(driverProfiles); - } - return null; - }) - ); - // run compute on remote nodes - runComputeOnRemoteNodes( + } + // starts computes on remote cluster + startComputeOnRemoteClusters( + sessionId, + rootTask, + configuration, + dataNodePlan, + exchangeSource, + getRemoteClusters(clusterToConcreteIndices, clusterToOriginalIndices), + () -> cancelOnFailure(rootTask, cancelled, refs.acquire()).map(response -> { + responseHeadersCollector.collect(); + if (configuration.profile()) { + collectedProfiles.addAll(response.getProfiles()); + } + return null; + }) + ); + } + } + + private List getRemoteClusters( + Map clusterToConcreteIndices, + Map clusterToOriginalIndices + ) { + List remoteClusters = new ArrayList<>(clusterToConcreteIndices.size()); + RemoteClusterService remoteClusterService = transportService.getRemoteClusterService(); + for (Map.Entry e : clusterToConcreteIndices.entrySet()) { + String clusterAlias = e.getKey(); + OriginalIndices concreteIndices = clusterToConcreteIndices.get(clusterAlias); + OriginalIndices originalIndices = clusterToOriginalIndices.get(clusterAlias); + if (originalIndices == null) { + assert false : "can't find original indices for cluster " + clusterAlias; + throw new IllegalStateException("can't find original indices for cluster " + clusterAlias); + } + if (concreteIndices.indices().length > 0) { + Transport.Connection connection = remoteClusterService.getConnection(clusterAlias); + remoteClusters.add(new RemoteCluster(clusterAlias, connection, concreteIndices.indices(), originalIndices.indices())); + } + } + return remoteClusters; + } + + static final class EmptyRemoteSink implements RemoteSink { + final SubscribableListener future = new SubscribableListener<>(); + + @Override + public void fetchPageAsync(boolean allSourcesFinished, ActionListener listener) { + future.addListener(listener.map(ignored -> new ExchangeResponse(null, true))); + } + + void finish() { + future.onResponse(null); + } + } + + private void startComputeOnDataNodes( + String sessionId, + String clusterAlias, + CancellableTask parentTask, + EsqlConfiguration configuration, + PhysicalPlan dataNodePlan, + Set concreteIndices, + String[] originalIndices, + ExchangeSourceHandler exchangeSource, + Supplier> listener + ) { + // Do not complete the exchange sources until we have linked all remote sinks + final EmptyRemoteSink emptyRemoteSink = new EmptyRemoteSink(); + exchangeSource.addRemoteSink(emptyRemoteSink, 1); + QueryBuilder requestFilter = PlannerUtils.requestFilter(dataNodePlan); + lookupDataNodes(parentTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(dataNodes -> { + try (RefCountingRunnable refs = new RefCountingRunnable(emptyRemoteSink::finish)) { + // 
For each target node, first open a remote exchange on the remote node, then link the exchange source to + // the new remote exchange sink, and initialize the computation on the target node via data-node-request. + for (DataNode node : dataNodes) { + var dataNodeListener = ActionListener.releaseAfter(listener.get(), refs.acquire()); + var queryPragmas = configuration.pragmas(); + ExchangeService.openExchange( + transportService, + node.connection, sessionId, - rootTask, - configuration, - dataNodePlan, - exchangeSource, - targetNodes, - () -> cancelOnFailure(rootTask, cancelled, requestRefs.acquire()).map(response -> { - responseHeadersCollector.collect(); - if (configuration.profile()) { - collectedProfiles.addAll(response.profiles); - } - return null; + queryPragmas.exchangeBufferSize(), + esqlExecutor, + dataNodeListener.delegateFailureAndWrap((delegate, unused) -> { + var remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, node.connection); + exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); + transportService.sendChildRequest( + node.connection, + DATA_ACTION_NAME, + new DataNodeRequest(sessionId, configuration, clusterAlias, node.shardIds, node.aliasFilters, dataNodePlan), + parentTask, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(delegate, ComputeResponse::new, esqlExecutor) + ); }) ); } - }) - ); + } + }, e -> { + emptyRemoteSink.finish(); + listener.get().onFailure(e); + })); } - private void runComputeOnRemoteNodes( + private void startComputeOnRemoteClusters( String sessionId, CancellableTask rootTask, EsqlConfiguration configuration, - PhysicalPlan dataNodePlan, + PhysicalPlan plan, ExchangeSourceHandler exchangeSource, - List targetNodes, - Supplier> listener + List clusters, + Supplier> listener ) { // Do not complete the exchange sources until we have linked all remote sinks - final SubscribableListener blockingSinkFuture = new SubscribableListener<>(); - exchangeSource.addRemoteSink( - (sourceFinished, l) -> blockingSinkFuture.addListener(l.map(ignored -> new ExchangeResponse(null, true))), - 1 - ); - try (RefCountingRunnable exchangeRefs = new RefCountingRunnable(() -> blockingSinkFuture.onResponse(null))) { - // For each target node, first open a remote exchange on the remote node, then link the exchange source to - // the new remote exchange sink, and initialize the computation on the target node via data-node-request. 
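(Editorial note, not part of the patch.) The placeholder-sink pattern in startComputeOnDataNodes above is worth calling out: EmptyRemoteSink is registered with the exchange source before any real remote sink, and the RefCountingRunnable finishes it only after every data node's sink has been linked. Since an exchange source completes once all of its sinks are finished, this closes the race where the asynchronous data-node lookup is still in flight, the source sees zero live sinks, and completes prematurely. A minimal sketch of the idea, with hypothetical names (ExchangeSourceSketch, CompletableFuture placeholders) standing in for the real ESQL exchange classes:

// Editorial sketch only: illustrates the placeholder-sink idea, not the ESQL API.
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;

final class ExchangeSourceSketch {
    private final List<CompletableFuture<Void>> sinks = new CopyOnWriteArrayList<>();

    void addRemoteSink(CompletableFuture<Void> sink) {
        sinks.add(sink);
    }

    // Completes only after every sink registered so far has completed.
    CompletableFuture<Void> completion() {
        return CompletableFuture.allOf(sinks.toArray(new CompletableFuture[0]));
    }

    public static void main(String[] args) {
        ExchangeSourceSketch source = new ExchangeSourceSketch();
        // Register the placeholder first, like EmptyRemoteSink in the patch.
        CompletableFuture<Void> placeholder = new CompletableFuture<>();
        source.addRemoteSink(placeholder);
        // ... asynchronously look up data nodes and link the real sinks ...
        CompletableFuture<Void> realSink = new CompletableFuture<>();
        source.addRemoteSink(realSink);
        placeholder.complete(null); // all real sinks linked: release the placeholder
        realSink.complete(null);    // the real computation finishes later
        source.completion().join(); // safe: cannot complete before the real sink
    }
}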
- for (TargetNode targetNode : targetNodes) { - var targetNodeListener = ActionListener.releaseAfter(listener.get(), exchangeRefs.acquire()); + final EmptyRemoteSink emptyRemoteSink = new EmptyRemoteSink(); + exchangeSource.addRemoteSink(emptyRemoteSink, 1); + try (RefCountingRunnable refs = new RefCountingRunnable(emptyRemoteSink::finish)) { + for (RemoteCluster cluster : clusters) { + var targetNodeListener = ActionListener.releaseAfter(listener.get(), refs.acquire()); var queryPragmas = configuration.pragmas(); ExchangeService.openExchange( transportService, - targetNode.node(), + cluster.connection, sessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, - targetNodeListener.delegateFailureAndWrap((delegate, unused) -> { - var remoteSink = exchangeService.newRemoteSink(rootTask, sessionId, transportService, targetNode.node); + targetNodeListener.delegateFailureAndWrap((l, unused) -> { + var remoteSink = exchangeService.newRemoteSink(rootTask, sessionId, transportService, cluster.connection); exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); + var clusterRequest = new ClusterComputeRequest( + cluster.clusterAlias, + sessionId, + configuration, + plan, + cluster.concreteIndices, + cluster.originalIndices + ); transportService.sendChildRequest( - targetNode.node, - DATA_ACTION_NAME, - new DataNodeRequest(sessionId, configuration, targetNode.shardIds, targetNode.aliasFilters, dataNodePlan), + cluster.connection, + CLUSTER_ACTION_NAME, + clusterRequest, rootTask, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(delegate, DataNodeResponse::new, esqlExecutor) + new ActionListenerResponseHandler<>(l, ComputeResponse::new, esqlExecutor) ); }) ); @@ -327,6 +431,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, } private void acquireSearchContexts( + String clusterAlias, List shardIds, EsqlConfiguration configuration, Map aliasFilters, @@ -355,7 +460,8 @@ private void acquireSearchContexts( var shardRequest = new ShardSearchRequest( shard.shardId(), configuration.absoluteStartedTimeInMillis(), - aliasFilter + aliasFilter, + clusterAlias ); SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); searchContexts.add(context); @@ -379,27 +485,28 @@ private void acquireSearchContexts( } } - record TargetNode(DiscoveryNode node, List shardIds, Map aliasFilters) { + record DataNode(Transport.Connection connection, List shardIds, Map aliasFilters) { + + } + + record RemoteCluster(String clusterAlias, Transport.Connection connection, String[] concreteIndices, String[] originalIndices) { } - private void computeTargetNodes( + /** + * Performs can_match and find the target nodes for the given target indices and filter. + *
<p>
+ * Ideally, the search_shards API should be called before the field-caps API; however, this can lead + * to a situation where the column structure (i.e., matched data types) differs depending on the query. + */ + void lookupDataNodes( Task parentTask, + String clusterAlias, QueryBuilder filter, Set concreteIndices, String[] originalIndices, - ActionListener> listener + ActionListener> listener ) { - var remoteIndices = transportService.getRemoteClusterService().groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, originalIndices); - remoteIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); - if (remoteIndices.isEmpty() == false) { - listener.onFailure( - new IllegalArgumentException("ES|QL does not yet support querying remote indices " + Arrays.toString(originalIndices)) - ); - return; - } - // Ideally, the search_shards API should be called before the field-caps API; however, this can lead - // to a situation where the column structure (i.e., matched data types) differs depending on the query. ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); ActionListener preservingContextListener = ContextPreservingActionListener.wrapPreservingContext( listener.map(resp -> { @@ -427,13 +534,13 @@ private void computeTargetNodes( nodeToAliasFilters.computeIfAbsent(targetNode, k -> new HashMap<>()).put(shardId.getIndex(), aliasFilter); } } - List targetNodes = new ArrayList<>(nodeToShards.size()); + List dataNodes = new ArrayList<>(nodeToShards.size()); for (Map.Entry> e : nodeToShards.entrySet()) { DiscoveryNode node = nodes.get(e.getKey()); Map aliasFilters = nodeToAliasFilters.getOrDefault(e.getKey(), Map.of()); - targetNodes.add(new TargetNode(node, e.getValue(), aliasFilters)); + dataNodes.add(new DataNode(transportService.getConnection(node), e.getValue(), aliasFilters)); } - return targetNodes; + return dataNodes; }), threadContext ); @@ -446,7 +553,7 @@ private void computeTargetNodes( null, null, false, - null + clusterAlias ); transportService.sendChildRequest( transportService.getLocalNode(), @@ -459,39 +566,6 @@ private void computeTargetNodes( } } - private static class DataNodeResponse extends TransportResponse { - private final List profiles; - - DataNodeResponse(List profiles) { - this.profiles = profiles; - } - - DataNodeResponse(StreamInput in) throws IOException { - super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { - if (in.readBoolean()) { - profiles = in.readCollectionAsImmutableList(DriverProfile::new); - } else { - profiles = null; - } - } else { - profiles = null; - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { - if (profiles == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - out.writeCollection(profiles); - } - } - } - } - // TODO: Use an internal action here public static final String DATA_ACTION_NAME = EsqlQueryAction.NAME + "/data"; @@ -502,29 +576,133 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T final var sessionId = request.sessionId(); final var exchangeSink = exchangeService.getSinkHandler(sessionId); parentTask.addListener(() -> exchangeService.finishSinkHandler(sessionId, new TaskCancelledException("task cancelled"))); - final ActionListener listener = new ChannelActionListener<>(channel); + final ActionListener listener = new ChannelActionListener<>(channel); final EsqlConfiguration configuration = 
request.configuration(); - acquireSearchContexts(request.shardIds(), configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { - var computeContext = new ComputeContext(sessionId, searchContexts, configuration, null, exchangeSink); - runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { - // don't return until all pages are fetched - exchangeSink.addCompletionListener( - ContextPreservingActionListener.wrapPreservingContext( - ActionListener.releaseAfter( - listener.map(nullValue -> new DataNodeResponse(driverProfiles)), - () -> exchangeService.finishSinkHandler(sessionId, null) - ), - transportService.getThreadPool().getThreadContext() - ) - ); + acquireSearchContexts( + request.clusterAlias(), + request.shardIds(), + configuration, + request.aliasFilters(), + ActionListener.wrap(searchContexts -> { + var computeContext = new ComputeContext(sessionId, searchContexts, configuration, null, exchangeSink); + runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { + // don't return until all pages are fetched + exchangeSink.addCompletionListener( + ContextPreservingActionListener.wrapPreservingContext( + ActionListener.releaseAfter( + listener.map(nullValue -> new ComputeResponse(driverProfiles)), + () -> exchangeService.finishSinkHandler(sessionId, null) + ), + transportService.getThreadPool().getThreadContext() + ) + ); + }, e -> { + exchangeService.finishSinkHandler(sessionId, e); + listener.onFailure(e); + })); }, e -> { exchangeService.finishSinkHandler(sessionId, e); listener.onFailure(e); - })); - }, e -> { - exchangeService.finishSinkHandler(sessionId, e); - listener.onFailure(e); - })); + }) + ); + } + } + + public static final String CLUSTER_ACTION_NAME = EsqlQueryAction.NAME + "/cluster"; + + private class ClusterRequestHandler implements TransportRequestHandler { + @Override + public void messageReceived(ClusterComputeRequest request, TransportChannel channel, Task task) { + ChannelActionListener listener = new ChannelActionListener<>(channel); + if (request.plan() instanceof ExchangeSinkExec == false) { + listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); + return; + } + runComputeOnRemoteCluster( + request.clusterAlias(), + request.sessionId(), + (CancellableTask) task, + request.configuration(), + (ExchangeSinkExec) request.plan(), + Set.of(request.indices()), + request.originalIndices(), + listener + ); + } + } + + /** + * Performs a compute on a remote cluster. The output pages are placed in an exchange sink specified by + * {@code globalSessionId}. The coordinator on the main cluster will poll pages from there. + *
<p>
+ * Currently, the coordinator on the remote cluster simply collects pages from data nodes in the remote cluster + * and places them in the exchange sink. We can achieve this by using a single exchange buffer to minimize overhead. + * However, here we use two exchange buffers so that we can run an actual plan on this coordinator to perform partial + * reduce operations, such as limit, topN, and partial-to-partial aggregation in the future. + */ + void runComputeOnRemoteCluster( + String clusterAlias, + String globalSessionId, + CancellableTask parentTask, + EsqlConfiguration configuration, + ExchangeSinkExec plan, + Set concreteIndices, + String[] originalIndices, + ActionListener listener + ) { + final var exchangeSink = exchangeService.getSinkHandler(globalSessionId); + parentTask.addListener(() -> exchangeService.finishSinkHandler(globalSessionId, new TaskCancelledException("request cancelled"))); + ThreadPool threadPool = transportService.getThreadPool(); + final var responseHeadersCollector = new ResponseHeadersCollector(threadPool.getThreadContext()); + listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); + final AtomicBoolean cancelled = new AtomicBoolean(); + final List collectedProfiles = configuration.profile() ? Collections.synchronizedList(new ArrayList<>()) : List.of(); + final String localSessionId = clusterAlias + ":" + globalSessionId; + var exchangeSource = new ExchangeSourceHandler( + configuration.pragmas().exchangeBufferSize(), + transportService.getThreadPool().executor(ESQL_THREAD_POOL_NAME) + ); + try ( + Releasable ignored = exchangeSource::decRef; + RefCountingListener refs = new RefCountingListener(listener.map(unused -> new ComputeResponse(collectedProfiles))) + ) { + exchangeSource.addCompletionListener(refs.acquire()); + exchangeSink.addCompletionListener(refs.acquire()); + PhysicalPlan coordinatorPlan = new ExchangeSinkExec( + plan.source(), + plan.output(), + plan.isIntermediateAgg(), + new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()) + ); + runCompute( + parentTask, + new ComputeContext(localSessionId, List.of(), configuration, exchangeSource, exchangeSink), + coordinatorPlan, + cancelOnFailure(parentTask, cancelled, refs.acquire()).map(driverProfiles -> { + responseHeadersCollector.collect(); + if (configuration.profile()) { + collectedProfiles.addAll(driverProfiles); + } + return null; + }) + ); + startComputeOnDataNodes( + localSessionId, + clusterAlias, + parentTask, + configuration, + plan, + concreteIndices, + originalIndices, + exchangeSource, + () -> cancelOnFailure(parentTask, cancelled, refs.acquire()).map(r -> { + responseHeadersCollector.collect(); + if (configuration.profile()) { + collectedProfiles.addAll(r.getProfiles()); + } + return null; + }) + ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java index d8e5e576386e3..5067e62fa6970 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plugin; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.StreamInput; @@ -17,6 +18,7 @@ import 
org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -33,6 +35,7 @@ final class DataNodeRequest extends TransportRequest implements IndicesRequest { private static final PlanNameRegistry planNameRegistry = new PlanNameRegistry(); private final String sessionId; private final EsqlConfiguration configuration; + private final String clusterAlias; private final List shardIds; private final Map aliasFilters; private final PhysicalPlan plan; @@ -42,12 +45,14 @@ final class DataNodeRequest extends TransportRequest implements IndicesRequest { DataNodeRequest( String sessionId, EsqlConfiguration configuration, + String clusterAlias, List shardIds, Map aliasFilters, PhysicalPlan plan ) { this.sessionId = sessionId; this.configuration = configuration; + this.clusterAlias = clusterAlias; this.shardIds = shardIds; this.aliasFilters = aliasFilters; this.plan = plan; @@ -57,6 +62,11 @@ final class DataNodeRequest extends TransportRequest implements IndicesRequest { super(in); this.sessionId = in.readString(); this.configuration = new EsqlConfiguration(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + this.clusterAlias = in.readString(); + } else { + this.clusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + } this.shardIds = in.readCollectionAsList(ShardId::new); this.aliasFilters = in.readMap(Index::new, AliasFilter::readFrom); this.plan = new PlanStreamInput(in, planNameRegistry, in.namedWriteableRegistry(), configuration).readPhysicalPlanNode(); @@ -67,6 +77,9 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(sessionId); configuration.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + out.writeString(clusterAlias); + } out.writeCollection(shardIds); out.writeMap(aliasFilters); new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan); @@ -111,6 +124,10 @@ QueryPragmas pragmas() { return configuration.pragmas(); } + String clusterAlias() { + return clusterAlias; + } + List shardIds() { return shardIds; } @@ -143,6 +160,7 @@ public boolean equals(Object o) { DataNodeRequest request = (DataNodeRequest) o; return sessionId.equals(request.sessionId) && configuration.equals(request.configuration) + && clusterAlias.equals(request.clusterAlias) && shardIds.equals(request.shardIds) && aliasFilters.equals(request.aliasFilters) && plan.equals(request.plan) @@ -151,6 +169,6 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(sessionId, configuration, shardIds, aliasFilters, plan); + return Objects.hash(sessionId, configuration, clusterAlias, shardIds, aliasFilters, plan); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 00e9cf1893411..b9564577e53f0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -46,6 +46,7 @@ import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; 
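(Editorial note, not part of the patch.) The DataNodeRequest serialization above is a standard wire backward-compatibility pattern: the new clusterAlias field is written only when the peer's transport version is on or after TransportVersions.ESQL_CLUSTER_ALIAS, and the reader, behind the same gate, falls back to RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY for older nodes, so mixed-version clusters stay wire-compatible. A minimal sketch of the version-gating pattern, using plain java.io streams as simplified stand-ins for the real StreamInput/StreamOutput API:

// Editorial sketch only: version-gated field serialization with stand-in types.
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

final class VersionGatedFieldSketch {
    static final int CLUSTER_ALIAS_ADDED = 42; // hypothetical version id
    static final String LOCAL_CLUSTER = "";    // stands in for LOCAL_CLUSTER_GROUP_KEY

    // Writer: emit the field only when the receiving node understands it.
    static void writeAlias(DataOutputStream out, int peerVersion, String alias) throws IOException {
        if (peerVersion >= CLUSTER_ALIAS_ADDED) {
            out.writeUTF(alias);
        }
        // Older peers: skip the field entirely; they use the default on read.
    }

    // Reader: mirror the writer's gate exactly, defaulting for older peers.
    static String readAlias(DataInputStream in, int peerVersion) throws IOException {
        return peerVersion >= CLUSTER_ALIAS_ADDED ? in.readUTF() : LOCAL_CLUSTER;
    }
}

The crucial invariant is that reader and writer test the identical version constant; if they ever disagree, every field after the gated one is read out of position and the stream desynchronizes.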
import org.elasticsearch.xpack.esql.action.RestEsqlAsyncQueryAction; +import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; import org.elasticsearch.xpack.esql.execution.PlanExecutor; @@ -144,7 +145,12 @@ public List getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster ) { - return List.of(new RestEsqlQueryAction(), new RestEsqlAsyncQueryAction(), new RestEsqlGetAsyncResultAction()); + return List.of( + new RestEsqlQueryAction(), + new RestEsqlAsyncQueryAction(), + new RestEsqlGetAsyncResultAction(), + new RestEsqlDeleteAsyncResultAction() + ); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 7f1667a1dc7bd..5a199a6581156 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -166,7 +166,12 @@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener EsqlQueryResponse.Profile profile = configuration.profile() ? new EsqlQueryResponse.Profile(result.profiles()) : null; - return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), request.async()); + if (task instanceof EsqlQueryTask asyncTask && request.keepOnCompletion()) { + String id = asyncTask.getExecutionId().getEncoded(); + return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), id, false, request.async()); + } else { + return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), request.async()); + } }) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java new file mode 100644 index 0000000000000..99cf8be307054 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql; + +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.BlockFactory; + +public class TestBlockFactory { + + private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + /** + * Returns the Non-Breaking block factory. 
+ */ + public static BlockFactory getNonBreakingInstance() { + return NON_BREAKING; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 9f31489ae24f8..ec21386612be3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -31,11 +31,17 @@ import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; +import org.elasticsearch.xcontent.InstantiatingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ParserConstructor; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.type.DataType; @@ -50,6 +56,9 @@ import java.util.List; import java.util.stream.Stream; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.esql.action.ResponseValueUtils.valuesToPage; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.equalTo; @@ -231,7 +240,58 @@ protected Writeable.Reader instanceReader() { @Override protected EsqlQueryResponse doParseInstance(XContentParser parser) { - return EsqlQueryResponse.fromXContent(parser); + return ResponseBuilder.fromXContent(parser); + } + + public static class ResponseBuilder { + private static final ParseField ID = new ParseField("id"); + private static final ParseField IS_RUNNING = new ParseField("is_running"); + private static final InstantiatingObjectParser PARSER; + + static { + InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( + "esql/query_response", + true, + ResponseBuilder.class + ); + parser.declareString(optionalConstructorArg(), ID); + parser.declareField( + optionalConstructorArg(), + p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
false : p.booleanValue(), + IS_RUNNING, + ObjectParser.ValueType.BOOLEAN_OR_NULL + ); + parser.declareObjectArray(constructorArg(), (p, c) -> ColumnInfo.fromXContent(p), new ParseField("columns")); + parser.declareField(constructorArg(), (p, c) -> p.list(), new ParseField("values"), ObjectParser.ValueType.OBJECT_ARRAY); + PARSER = parser.build(); + } + + // Used for XContent reconstruction + private final EsqlQueryResponse response; + + @ParserConstructor + public ResponseBuilder(@Nullable String asyncExecutionId, Boolean isRunning, List columns, List> values) { + this.response = new EsqlQueryResponse( + columns, + List.of(valuesToPage(TestBlockFactory.getNonBreakingInstance(), columns, values)), + null, + false, + asyncExecutionId, + isRunning != null, + isAsync(asyncExecutionId, isRunning) + ); + } + + static boolean isAsync(@Nullable String asyncExecutionId, Boolean isRunning) { + if (asyncExecutionId != null || isRunning != null) { + return true; + } + return false; + } + + static EsqlQueryResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null).response; + } } public void testChunkResponseSizeColumnar() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 5179e760236a6..404c96b4f0722 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -32,6 +32,7 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; @@ -40,6 +41,7 @@ import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; @@ -148,11 +150,17 @@ protected static Iterable parameterSuppliersFromTypedData(List values) { - return new Page(BlockUtils.fromListRow(BlockFactory.getNonBreakingInstance(), values)); + return new Page(BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); } /** * Hack together a layout by scanning for Fields. * Those will show up in the layout in whatever order a depth first traversal finds them. 
*/ - protected void buildLayout(Layout.Builder builder, Expression e) { + protected static void buildLayout(Layout.Builder builder, Expression e) { if (e instanceof FieldAttribute f) { builder.append(f); return; @@ -432,13 +444,14 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); List simpleData = testCase.getDataValues(); try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { - Block[] orig = BlockUtils.fromListRow(BlockFactory.getNonBreakingInstance(), simpleData); + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); for (int i = 0; i < orig.length; i++) { List data = new ArrayList<>(); Block[] blocks = new Block[orig.length]; for (int b = 0; b < blocks.length; b++) { if (b == i) { - blocks[b] = orig[b].elementType().newBlockBuilder(1).appendNull().build(); + blocks[b] = orig[b].elementType().newBlockBuilder(1, blockFactory).appendNull().build(); data.add(null); } else { blocks[b] = orig[b]; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 838044c8b90f6..90692d5b19df1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -12,8 +12,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -88,11 +88,15 @@ protected Expression build(Source source, List args) { public void testEvalCase() { testCase(caseExpr -> { + DriverContext driverContext = driverContext(); + Page page = new Page(driverContext.blockFactory().newConstantIntBlockWith(0, 1)); try ( - EvalOperator.ExpressionEvaluator eval = caseExpr.toEvaluator(child -> evaluator(child)).get(driverContext()); - Block block = eval.eval(new Page(IntBlock.newConstantBlockWith(0, 1))) + EvalOperator.ExpressionEvaluator eval = caseExpr.toEvaluator(child -> evaluator(child)).get(driverContext); + Block block = eval.eval(page) ) { return toJavaObject(block, 0); + } finally { + page.releaseBlocks(); } }); } @@ -148,7 +152,8 @@ public void testCaseWithIncompatibleTypes() { public void testCaseIsLazy() { Case caseExpr = caseExpr(true, 1, true, 2); - try (Block block = caseExpr.toEvaluator(child -> { + DriverContext driveContext = driverContext(); + EvalOperator.ExpressionEvaluator evaluator = caseExpr.toEvaluator(child -> { Object value = child.fold(); if (value != null && value.equals(2)) { return dvrCtx -> new EvalOperator.ExpressionEvaluator() { @@ -163,8 +168,12 @@ public void close() {} }; } return evaluator(child); - }).get(driverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1)))) { + }).get(driveContext); + Page page = new 
Page(driveContext.blockFactory().newConstantIntBlockWith(0, 1)); + try (Block block = evaluator.eval(page)) { assertEquals(1, toJavaObject(block, 0)); + } finally { + page.releaseBlocks(); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index abaa382637882..9854dfbe11460 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -12,8 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -84,10 +85,11 @@ protected Expression build(Source source, List args) { } public void testConstantDelimiter() { + DriverContext driverContext = driverContext(); try ( EvalOperator.ExpressionEvaluator eval = evaluator( new Split(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataTypes.KEYWORD)) - ).get(driverContext()) + ).get(driverContext) ) { /* * 58 is ascii for : and appears in the toString below. We don't convert the delimiter to a @@ -96,8 +98,12 @@ public void testConstantDelimiter() { */ assert ':' == 58; assertThat(eval.toString(), equalTo("SplitSingleByteEvaluator[str=Attribute[channel=0], delim=58]")); - try (Block block = eval.eval(new Page(BytesRefBlock.newConstantBlockWith(new BytesRef("foo:bar"), 1)))) { + BlockFactory blockFactory = driverContext.blockFactory(); + Page page = new Page(blockFactory.newConstantBytesRefBlockWith(new BytesRef("foo:bar"), 1)); + try (Block block = eval.eval(page)) { assertThat(toJavaObject(block, 0), equalTo(List.of(new BytesRef("foo"), new BytesRef("bar")))); + } finally { + page.releaseBlocks(); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java new file mode 100644 index 0000000000000..a09cb68c893e0 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.predicate.operator; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.junit.After; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class BreakerTests extends ESTestCase { + @ParametersFactory + public static Iterable parameters() { + List params = new ArrayList<>(); + + Expression expression = new Div( + Source.synthetic("[1] / (long) 2"), + AbstractFunctionTestCase.field("f", DataTypes.LONG), + new Literal(Source.EMPTY, 2, DataTypes.INTEGER) + ); + for (int b = 0; b < 136; b++) { + params.add(new Object[] { ByteSizeValue.ofBytes(b), expression }); + } + return params; + } + + private final List breakers = new ArrayList<>(); + + private final ByteSizeValue limit; + private final Expression expression; + + public BreakerTests(ByteSizeValue limit, Expression expression) { + this.limit = limit; + this.expression = expression; + } + + public void testBreaker() { + DriverContext unlimited = driverContext(ByteSizeValue.ofGb(1)); + DriverContext context = driverContext(limit); + EvalOperator.ExpressionEvaluator eval = AbstractFunctionTestCase.evaluator(expression).get(context); + try (Block b = unlimited.blockFactory().newConstantNullBlock(1)) { + Exception e = expectThrows(CircuitBreakingException.class, () -> eval.eval(new Page(b))); + assertThat(e.getMessage(), equalTo("over test limit")); + } + } + + /** + * A {@link DriverContext} that won't throw {@link CircuitBreakingException}. 
+ */ + private DriverContext driverContext(ByteSizeValue limit) { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, limit).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + } + + @After + public void allBreakersEmpty() throws Exception { + // first check that all big arrays are released, which can affect breakers + MockBigArrays.ensureAllArraysAreReleased(); + + for (CircuitBreaker breaker : breakers) { + assertThat("Unexpected used in breaker: " + breaker, breaker.getUsed(), equalTo(0L)); + } + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index f8ace33a4bbc5..fb205d873466c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -10,12 +10,12 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -40,6 +40,8 @@ public class TextFormatTests extends ESTestCase { + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + public void testCsvContentType() { assertEquals("text/csv; charset=utf-8; header=present", CSV.contentType(req())); } @@ -246,7 +248,7 @@ private static EsqlQueryResponse emptyData() { } private static EsqlQueryResponse regularData() { - BlockFactory blockFactory = BlockFactory.getNonBreakingInstance(); + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); // headers List headers = asList( new ColumnInfo("string", "keyword"), @@ -258,13 +260,13 @@ private static EsqlQueryResponse regularData() { // values List values = List.of( new Page( - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("Along The River Bank")) .appendBytesRef(new BytesRef("Mind Train")) .build(), blockFactory.newIntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock(), blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(CARTESIAN.pointAsWKB(new SpatialPoint(1234, 5678))) .appendBytesRef(CARTESIAN.pointAsWKB(new SpatialPoint(-9753, 2611))) .build() @@ -281,8 +283,11 @@ private static EsqlQueryResponse escapedData() { // values List values = List.of( new Page( - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("normal")).appendBytesRef(new BytesRef("commas")).build(), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("normal")) + .appendBytesRef(new BytesRef("commas")) + 
.build(), + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("\"quo\"ted\",\n")) .appendBytesRef(new BytesRef("a,b,c,\n,d,e,\t\n")) .build() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index f556e45723b5a..f84f0d676e7e5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -10,11 +10,10 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.SpatialPoint; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -29,7 +28,7 @@ public class TextFormatterTests extends ESTestCase { - static BlockFactory blockFactory = BlockFactory.getNonBreakingInstance(); + static BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); private final List columns = Arrays.asList( new ColumnInfo("foo", "keyword"), @@ -47,15 +46,15 @@ public class TextFormatterTests extends ESTestCase { columns, List.of( new Page( - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("15charwidedata!")) .appendBytesRef(new BytesRef("dog")) .build(), blockFactory.newLongArrayVector(new long[] { 1, 2 }, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 6.888, 123124.888 }, 2).asBlock(), - Block.constantNullBlock(2), + blockFactory.newConstantNullBlock(2), blockFactory.newDoubleArrayVector(new double[] { 12, 9912 }, 2).asBlock(), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("rabbit")).appendBytesRef(new BytesRef("goat")).build(), + blockFactory.newBytesRefBlockBuilder(2).appendBytesRef(new BytesRef("rabbit")).appendBytesRef(new BytesRef("goat")).build(), blockFactory.newLongArrayVector( new long[] { UTC_DATE_TIME_FORMATTER.parseMillis("1953-09-02T00:00:00.000Z"), @@ -63,11 +62,11 @@ public class TextFormatterTests extends ESTestCase { 2 ).asBlock(), blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(CARTESIAN.pointAsWKB(new SpatialPoint(1234, 5678))) .appendBytesRef(CARTESIAN.pointAsWKB(new SpatialPoint(-9753, 2611))) .build(), - Block.constantNullBlock(2) + blockFactory.newConstantNullBlock(2) ) ), null, @@ -118,12 +117,18 @@ public void testFormatWithoutHeader() { columns, List.of( new Page( - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("doggie")).appendBytesRef(new BytesRef("dog")).build(), + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("doggie")) + .appendBytesRef(new BytesRef("dog")) + .build(), blockFactory.newLongArrayVector(new long[] { 4, 2 }, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 123124.888 }, 2).asBlock(), - Block.constantNullBlock(2), + blockFactory.newConstantNullBlock(2), blockFactory.newDoubleArrayVector(new double[] { 77.0, 9912.0 }, 2).asBlock(), - 
BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("wombat")).appendBytesRef(new BytesRef("goat")).build(), + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("wombat")) + .appendBytesRef(new BytesRef("goat")) + .build(), blockFactory.newLongArrayVector( new long[] { UTC_DATE_TIME_FORMATTER.parseMillis("1955-01-21T01:02:03.342Z"), @@ -131,11 +136,11 @@ public void testFormatWithoutHeader() { 2 ).asBlock(), blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(CARTESIAN.pointAsWKB(new SpatialPoint(1234, 5678))) .appendBytesRef(CARTESIAN.pointAsWKB(new SpatialPoint(-9753, 2611))) .build(), - Block.constantNullBlock(2) + blockFactory.newConstantNullBlock(2) ) ), null, @@ -172,7 +177,7 @@ public void testVeryLongPadding() { List.of(new ColumnInfo("foo", "keyword")), List.of( new Page( - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef(smallFieldContent)) .appendBytesRef(new BytesRef(largeFieldContent)) .build() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 352dccc046588..cbbb0ab32c5c8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.aggregation.QuantileStates; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -301,7 +301,7 @@ public void testMultipleCombineLimits() { var limitWithMinimum = randomIntBetween(0, numberOfLimits - 1); var fa = getFieldAttribute("a", INTEGER); - var relation = localSource(BlockFactory.getNonBreakingInstance(), singletonList(fa), singletonList(1)); + var relation = localSource(TestBlockFactory.getNonBreakingInstance(), singletonList(fa), singletonList(1)); LogicalPlan plan = relation; for (int i = 0; i < numberOfLimits; i++) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 1f2bde2526fab..b0d881755453e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -1892,6 +1892,110 @@ public boolean exists(String field) { assertThat(Expressions.names(localSourceExec.output()), contains("languages", "min", "seen")); } + /** + * Expects + * intermediate plan + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[COUNT(emp_no{f}#6) AS c],FINAL,null] + * \_ExchangeExec[[count{r}#16, seen{r}#17],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[COUNT(emp_no{f}#6) AS c]] + * 
\_Filter[emp_no{f}#6 > 10[INTEGER]] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..]]] + * + * and final plan is + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[COUNT(emp_no{f}#6) AS c],FINAL,8] + * \_ExchangeExec[[count{r}#16, seen{r}#17],true] + * \_LocalSourceExec[[count{r}#16, seen{r}#17],[LongVectorBlock[vector=ConstantLongVector[positions=1, value=0]]]] + */ + public void testPartialAggFoldingOutput() { + var plan = physicalPlan(""" + from test + | where emp_no > 10 + | stats c = count(emp_no) + """); + + var stats = statsForMissingField("emp_no"); + var optimized = optimizedPlan(plan, stats); + + var limit = as(optimized, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + var exchange = as(agg.child(), ExchangeExec.class); + assertThat(Expressions.names(exchange.output()), contains("count", "seen")); + var source = as(exchange.child(), LocalSourceExec.class); + assertThat(Expressions.names(source.output()), contains("count", "seen")); + } + + /** + * Checks that when the folding happens on the coordinator, the intermediate agg state + * are not used anymore. + * + * Expects + * LimitExec[10000[INTEGER]] + * \_AggregateExec[[],[COUNT(emp_no{f}#5) AS c],FINAL,8] + * \_AggregateExec[[],[COUNT(emp_no{f}#5) AS c],PARTIAL,8] + * \_LimitExec[10[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[emp_no{r}#5]] + * \_EvalExec[[null[INTEGER] AS emp_no]] + * \_EsQueryExec[test], query[][_doc{f}#26], limit[10], sort[] estimatedRowSize[8] + */ + public void testGlobalAggFoldingOutput() { + var plan = physicalPlan(""" + from test + | limit 10 + | stats c = count(emp_no) + """); + + var stats = statsForMissingField("emp_no"); + var optimized = optimizedPlan(plan, stats); + + var limit = as(optimized, LimitExec.class); + var aggFinal = as(limit.child(), AggregateExec.class); + var aggPartial = as(aggFinal.child(), AggregateExec.class); + assertThat(Expressions.names(aggPartial.output()), contains("c")); + limit = as(aggPartial.child(), LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + } + + /** + * Checks the folded aggregation preserves the intermediate output. 
+ * + * Expects + * ProjectExec[[a{r}#5]] + * \_EvalExec[[__a_SUM@734e2841{r}#16 / __a_COUNT@12536eab{r}#17 AS a]] + * \_LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SUM(emp_no{f}#6) AS __a_SUM@734e2841, COUNT(emp_no{f}#6) AS __a_COUNT@12536eab],FINAL,24] + * \_ExchangeExec[[sum{r}#18, seen{r}#19, count{r}#20, seen{r}#21],true] + * \_LocalSourceExec[[sum{r}#18, seen{r}#19, count{r}#20, seen{r}#21],[LongArrayBlock[positions=1, mvOrdering=UNORDERED, + * values=[0, + * 0]], BooleanVectorBlock[vector=ConstantBooleanVector[positions=1, value=true]], + * LongVectorBlock[vector=ConstantLongVector[positions=1, value=0]], + * BooleanVectorBlock[vector=ConstantBooleanVector[positions=1, value=true]]]] + */ + public void testPartialAggFoldingOutputForSyntheticAgg() { + var plan = physicalPlan(""" + from test + | where emp_no > 10 + | stats a = avg(emp_no) + """); + + var stats = statsForMissingField("emp_no"); + var optimized = optimizedPlan(plan, stats); + + var project = as(optimized, ProjectExec.class); + var eval = as(project.child(), EvalExec.class); + var limit = as(eval.child(), LimitExec.class); + var aggFinal = as(limit.child(), AggregateExec.class); + assertThat(aggFinal.output(), hasSize(2)); + var exchange = as(aggFinal.child(), ExchangeExec.class); + assertThat(Expressions.names(exchange.output()), contains("sum", "seen", "count", "seen")); + var source = as(exchange.child(), LocalSourceExec.class); + assertThat(Expressions.names(source.output()), contains("sum", "seen", "count", "seen")); + } + private static EsQueryExec source(PhysicalPlan plan) { if (plan instanceof ExchangeExec exchange) { plan = exchange.child(); @@ -1941,6 +2045,7 @@ private PhysicalPlan physicalPlan(String query) { var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); + // System.out.println(physical); assertSerialization(physical); return physical; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index b4c9d7a9baeca..1d2b11d3deb89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -12,12 +12,12 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.SerializationTestUtils; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; @@ -162,7 +162,7 @@ private static FieldAttribute field(String name, DataType type) { static DriverContext driverContext() { return new DriverContext( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getNonBreakingInstance() + TestBlockFactory.getNonBreakingInstance() 
); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java index 6a5c8fd3f92c2..af7a66fea9bb2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -18,6 +19,7 @@ import org.elasticsearch.grok.Grok; import org.elasticsearch.grok.GrokBuiltinPatterns; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; import java.util.Map; @@ -26,6 +28,8 @@ import static org.hamcrest.Matchers.is; public class GrokEvaluatorExtracterTests extends ESTestCase { + final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + final Map KEY_TO_BLOCK = Map.of("a", 0, "b", 1, "c", 2, "d", 3, "e", 4, "f", 5); final Map TYPES = Map.of( "a", @@ -196,7 +200,7 @@ private void checkBooleanBlock(Block.Builder builder, int[] itemsPerRow, boolean private BytesRefBlock buildInputBlock(int[] mvSize, String... input) { int nextString = 0; - BytesRefBlock.Builder inputBuilder = BytesRefBlock.newBlockBuilder(input.length); + BytesRefBlock.Builder inputBuilder = blockFactory.newBytesRefBlockBuilder(input.length); for (int i = 0; i < mvSize.length; i++) { if (mvSize[i] == 0) { inputBuilder.appendNull(); @@ -222,12 +226,12 @@ private BytesRefBlock buildInputBlock(int[] mvSize, String... 
input) { private Block.Builder[] buidDefaultTargetBlocks(int estimatedSize) { return new Block.Builder[] { - BytesRefBlock.newBlockBuilder(estimatedSize), - IntBlock.newBlockBuilder(estimatedSize), - LongBlock.newBlockBuilder(estimatedSize), - DoubleBlock.newBlockBuilder(estimatedSize), - DoubleBlock.newBlockBuilder(estimatedSize), - BooleanBlock.newBlockBuilder(estimatedSize) }; + blockFactory.newBytesRefBlockBuilder(estimatedSize), + blockFactory.newIntBlockBuilder(estimatedSize), + blockFactory.newLongBlockBuilder(estimatedSize), + blockFactory.newDoubleBlockBuilder(estimatedSize), + blockFactory.newDoubleBlockBuilder(estimatedSize), + blockFactory.newBooleanBlockBuilder(estimatedSize) }; } private GrokEvaluatorExtracter buildExtracter(String pattern, Map keyToBlock, Map types) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 24fcae0f6bbb0..27a45e71a69c1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -19,7 +19,6 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -31,6 +30,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.TestSearchContext; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; @@ -124,7 +124,7 @@ private LocalExecutionPlanner planner() throws IOException { "test", null, BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), Settings.EMPTY, config(), null, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 279917f7ad044..8377530b9fbc2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -26,6 +26,7 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.SourceOperator.SourceOperatorFactory; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; @@ -293,7 +294,8 @@ private Block extractBlockForColumn(Page page, String columnName) { DocBlock docBlock = page.getBlock(0); IntVector docIndices = docBlock.asVector().docs(); Block originalData = testData.getBlock(columnIndex); - 
Block.Builder builder = originalData.elementType().newBlockBuilder(docIndices.getPositionCount()); + Block.Builder builder = originalData.elementType() + .newBlockBuilder(docIndices.getPositionCount(), TestBlockFactory.getNonBreakingInstance()); for (int c = 0; c < docIndices.getPositionCount(); c++) { int doc = docIndices.getInt(c); builder.copyFrom(originalData, doc, doc + 1); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java index 8970617548016..f1701ed696d2c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java @@ -82,6 +82,7 @@ protected DataNodeRequest createTestInstance() { DataNodeRequest request = new DataNodeRequest( sessionId, EsqlConfigurationSerializationTests.randomConfiguration(query), + randomAlphaOfLength(10), shardIds, aliasFilters, physicalPlan @@ -92,9 +93,16 @@ protected DataNodeRequest createTestInstance() { @Override protected DataNodeRequest mutateInstance(DataNodeRequest in) throws IOException { - return switch (between(0, 5)) { + return switch (between(0, 6)) { case 0 -> { - var request = new DataNodeRequest(randomAlphaOfLength(20), in.configuration(), in.shardIds(), in.aliasFilters(), in.plan()); + var request = new DataNodeRequest( + randomAlphaOfLength(20), + in.configuration(), + in.clusterAlias(), + in.shardIds(), + in.aliasFilters(), + in.plan() + ); request.setParentTask(in.getParentTask()); yield request; } @@ -102,6 +110,7 @@ protected DataNodeRequest mutateInstance(DataNodeRequest in) throws IOException var request = new DataNodeRequest( in.sessionId(), EsqlConfigurationSerializationTests.randomConfiguration(), + in.clusterAlias(), in.shardIds(), in.aliasFilters(), in.plan() @@ -111,7 +120,14 @@ protected DataNodeRequest mutateInstance(DataNodeRequest in) throws IOException } case 2 -> { List shardIds = randomList(1, 10, () -> new ShardId("new-index-" + between(1, 10), "n/a", between(1, 10))); - var request = new DataNodeRequest(in.sessionId(), in.configuration(), shardIds, in.aliasFilters(), in.plan()); + var request = new DataNodeRequest( + in.sessionId(), + in.configuration(), + in.clusterAlias(), + shardIds, + in.aliasFilters(), + in.plan() + ); request.setParentTask(in.getParentTask()); yield request; } @@ -132,6 +148,7 @@ protected DataNodeRequest mutateInstance(DataNodeRequest in) throws IOException var request = new DataNodeRequest( in.sessionId(), in.configuration(), + in.clusterAlias(), in.shardIds(), in.aliasFilters(), mapAndMaybeOptimize(parse(newQuery)) @@ -146,18 +163,45 @@ protected DataNodeRequest mutateInstance(DataNodeRequest in) throws IOException } else { aliasFilters = Map.of(new Index("concrete-index", "n/a"), AliasFilter.of(new TermQueryBuilder("id", "2"), "alias-2")); } - var request = new DataNodeRequest(in.sessionId(), in.configuration(), in.shardIds(), aliasFilters, in.plan()); + var request = new DataNodeRequest( + in.sessionId(), + in.configuration(), + in.clusterAlias(), + in.shardIds(), + aliasFilters, + in.plan() + ); request.setParentTask(request.getParentTask()); yield request; } case 5 -> { - var request = new DataNodeRequest(in.sessionId(), in.configuration(), in.shardIds(), in.aliasFilters(), in.plan()); + var request = new DataNodeRequest( + in.sessionId(), + in.configuration(), + in.clusterAlias(), + 
in.shardIds(), + in.aliasFilters(), + in.plan() + ); request.setParentTask( randomValueOtherThan(request.getParentTask().getNodeId(), () -> randomAlphaOfLength(10)), randomNonNegativeLong() ); yield request; } + case 6 -> { + var clusterAlias = randomValueOtherThan(in.clusterAlias(), () -> randomAlphaOfLength(10)); + var request = new DataNodeRequest( + in.sessionId(), + in.configuration(), + clusterAlias, + in.shardIds(), + in.aliasFilters(), + in.plan() + ); + request.setParentTask(request.getParentTask()); + yield request; + } default -> throw new AssertionError("invalid value"); }; } diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java index 5f219bd8ce592..652dcefa2a605 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java @@ -232,21 +232,16 @@ public void testRetryPointInTime() throws Exception { ).keepAlive(TimeValue.timeValueMinutes(2)); final String pitId = client().execute(TransportOpenPointInTimeAction.TYPE, openPointInTimeRequest).actionGet().getPointInTimeId(); try { - assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName).setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), - searchResponse -> { - assertThat(searchResponse.pointInTimeId(), equalTo(pitId)); - assertHitCount(searchResponse, numDocs); - } - ); + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), searchResponse -> { + assertThat(searchResponse.pointInTimeId(), equalTo(pitId)); + assertHitCount(searchResponse, numDocs); + }); internalCluster().restartNode(assignedNode); ensureGreen(indexName); assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName) - .setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) + prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) .setPreFilterShardSize(between(1, 10)) .setAllowPartialSearchResults(true) .setPointInTime(new PointInTimeBuilder(pitId)), @@ -287,7 +282,7 @@ public void testPointInTimeWithDeletedIndices() { indicesAdmin().prepareDelete("index-1").get(); // Return partial results if allow partial search result is allowed assertResponse( - prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), + prepareSearch().setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), searchResponse -> { assertFailures(searchResponse); assertHitCount(searchResponse, index2); @@ -296,7 +291,7 @@ public void testPointInTimeWithDeletedIndices() { // Fails if allow partial search result is not allowed expectThrows( ElasticsearchException.class, - prepareSearch().setPreference(null).setAllowPartialSearchResults(false).setPointInTime(new PointInTimeBuilder(pitId))::get + prepareSearch().setAllowPartialSearchResults(false).setPointInTime(new PointInTimeBuilder(pitId))::get ); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitId)).actionGet(); @@ -322,7 +317,7 @@ public void testOpenPointInTimeWithNoIndexMatched() { .getPointInTimeId(); try { 
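The new `case 6` above follows the usual mutation-testing convention for wire-serializable requests: each switch branch perturbs exactly one field (here the newly added `clusterAlias`), so the round-trip tests can prove that every field is actually written to and read from the stream. A minimal sketch of that convention, using a hypothetical two-field value type and the standard `ESTestCase` random helpers:

```java
// Sketch only: a hypothetical two-field value type, mirroring the pattern above.
record ExampleRequest(String sessionId, String clusterAlias) {}

ExampleRequest mutateInstance(ExampleRequest in) {
    // One case per field; randomValueOtherThan guarantees the mutated copy
    // differs from the input in exactly that field.
    return switch (between(0, 1)) {
        case 0 -> new ExampleRequest(randomValueOtherThan(in.sessionId(), () -> randomAlphaOfLength(20)), in.clusterAlias());
        case 1 -> new ExampleRequest(in.sessionId(), randomValueOtherThan(in.clusterAlias(), () -> randomAlphaOfLength(10)));
        default -> throw new AssertionError("invalid value");
    };
}
```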
assertNoFailuresAndResponse( - prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), + prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), searchResponse -> assertHitCount(searchResponse, numDocs) ); } finally { @@ -338,7 +333,7 @@ public void testOpenPointInTimeWithNoIndexMatched() { .actionGet() .getPointInTimeId(); try { - assertHitCountAndNoFailures(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), 0); + assertHitCountAndNoFailures(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), 0); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitId)).actionGet(); } diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java index ca848c8bb8c44..5a7453903b13b 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java @@ -150,11 +150,7 @@ public void testCloseFreezeAndOpen() throws Exception { try { for (int from = 0; from < 3; from++) { assertResponse( - client().prepareSearch() - .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED) - .setPointInTime(new PointInTimeBuilder(pitId)) - .setSize(1) - .setFrom(from), + client().prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(1).setFrom(from), response -> { assertHitCount(response, 3); assertEquals(1, response.getHits().getHits().length); diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index d04bb88325cc7..ad5e224efd5db 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -78,16 +78,24 @@ public class CountedKeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "counted_keyword"; public static final String COUNT_FIELD_NAME_SUFFIX = "_count"; - public static final FieldType FIELD_TYPE; + private static final FieldType FIELD_TYPE_INDEXED; + private static final FieldType FIELD_TYPE_NOT_INDEXED; static { - FieldType ft = new FieldType(); - ft.setDocValuesType(DocValuesType.SORTED_SET); - ft.setTokenized(false); - ft.setOmitNorms(true); - ft.setIndexOptions(IndexOptions.DOCS); - ft.freeze(); - FIELD_TYPE = freezeAndDeduplicateFieldType(ft); + FieldType indexed = new FieldType(); + indexed.setDocValuesType(DocValuesType.SORTED_SET); + indexed.setTokenized(false); + indexed.setOmitNorms(true); + indexed.setIndexOptions(IndexOptions.DOCS); + FIELD_TYPE_INDEXED = freezeAndDeduplicateFieldType(indexed); + + FieldType notIndexed = new FieldType(); + notIndexed.setDocValuesType(DocValuesType.SORTED_SET); + notIndexed.setTokenized(false); + notIndexed.setOmitNorms(true); + notIndexed.setIndexOptions(IndexOptions.NONE); + FIELD_TYPE_NOT_INDEXED = freezeAndDeduplicateFieldType(notIndexed); + } private static class CountedKeywordFieldType extends StringFieldType { @@ -261,7 +269,12 
@@ public TermsEnum termsEnum() throws IOException { } } + private static CountedKeywordFieldMapper toType(FieldMapper in) { + return (CountedKeywordFieldMapper) in; + } + public static class Builder extends FieldMapper.Builder { + private final Parameter indexed = Parameter.indexParam(m -> toType(m).mappedFieldType.isIndexed(), true); private final Parameter> meta = Parameter.metaParam(); protected Builder(String name) { @@ -270,22 +283,24 @@ protected Builder(String name) { @Override protected Parameter[] getParameters() { - return new Parameter[] { meta }; + return new Parameter[] { meta, indexed }; } @Override public FieldMapper build(MapperBuilderContext context) { BinaryFieldMapper countFieldMapper = new BinaryFieldMapper.Builder(name + COUNT_FIELD_NAME_SUFFIX, true).build(context); + boolean isIndexed = indexed.getValue(); + FieldType ft = isIndexed ? FIELD_TYPE_INDEXED : FIELD_TYPE_NOT_INDEXED; return new CountedKeywordFieldMapper( name, - FIELD_TYPE, + ft, new CountedKeywordFieldType( context.buildFullName(name), - true, + isIndexed, false, true, - new TextSearchInfo(FIELD_TYPE, null, KEYWORD_ANALYZER, KEYWORD_ANALYZER), + new TextSearchInfo(ft, null, KEYWORD_ANALYZER, KEYWORD_ANALYZER), meta.getValue(), countFieldMapper.fieldType() ), diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java index 1468ed456b132..2ffd4468c814a 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.countedkeyword; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -82,4 +84,15 @@ public void testDottedFieldNames() throws IOException { List fields = doc.rootDoc().getFields("dotted.field"); assertEquals(1, fields.size()); } + + public void testDisableIndex() throws IOException { + DocumentMapper mapper = createDocumentMapper( + fieldMapping(b -> b.field("type", CountedKeywordFieldMapper.CONTENT_TYPE).field("index", false)) + ); + ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); + List fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.size()); + assertEquals(IndexOptions.NONE, fields.get(0).fieldType().indexOptions()); + assertEquals(DocValuesType.SORTED_SET, fields.get(0).fieldType().docValuesType()); + } } diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index 1cc37f5c4ffc0..9d931974d25d5 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -2,6 +2,7 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply 
plugin: 'elasticsearch.standalone-rest-test' @@ -49,16 +50,29 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' + maybeDisableForFips(it) } tasks.register('mixed-cluster', RestIntegTestTask) { dependsOn 'remote-cluster' useCluster remoteCluster systemProperty 'tests.rest.suite', 'multi_cluster' + maybeDisableForFips(it) } tasks.register("integTest") { dependsOn 'mixed-cluster' + maybeDisableForFips(it) } tasks.named("check").configure { dependsOn("integTest") } + +//TODO: remove with version 8.14. A new FIPS setting was added in 8.13. Since FIPS configures all test clusters and this specific integTest uses +// the previous minor version, that setting is not available when running in FIPS until 8.14. +def maybeDisableForFips(task) { + if (BuildParams.inFipsJvm) { + if(Version.fromString(project.version).before(Version.fromString('8.14.0'))) { + task.enabled = false + } + } +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java index ecc601b0f1eae..5ce0a24a40d9d 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java @@ -7,11 +7,12 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.query.QueryBuilders; @@ -27,11 +28,11 @@ import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; @@ -203,26 +204,26 @@ public void testSimultaneousExplainSameConfig() throws IOException { ) .buildForExplain(); - List> futures = new ArrayList<>(); - - for (int i = 0; i < simultaneousInvocationCount; ++i) { - futures.add(client().execute(ExplainDataFrameAnalyticsAction.INSTANCE, new ExplainDataFrameAnalyticsAction.Request(config))); - } - - ExplainDataFrameAnalyticsAction.Response previous = null; - for (ActionFuture future : futures) { - // The main purpose of this test is that actionGet() here will throw an exception - // if any of the simultaneous calls returns an error due to interaction between - // the many estimation processes that get run - ExplainDataFrameAnalyticsAction.Response current = future.actionGet(10000); - if (previous != null) { - // A secondary check the test can perform is that the multiple invocations - // return the same result (but it was failures 
due to unwanted interactions - // that caused this test to be written) - assertEquals(previous, current); + safeAwait(SubscribableListener.newForked(testListener -> { + try (var listeners = new RefCountingListener(testListener)) { + final var firstResponseRef = new AtomicReference(); + for (int i = 0; i < simultaneousInvocationCount; ++i) { + client().execute( + ExplainDataFrameAnalyticsAction.INSTANCE, + new ExplainDataFrameAnalyticsAction.Request(config), + // The main purpose of this test is that the action will complete its listener exceptionally if any of the + // simultaneous calls returns an error due to interaction between the many estimation processes that get run. + listeners.acquire(response -> { + // A secondary check the test can perform is that the multiple invocations return the same result + // (but it was failures due to unwanted interactions that caused this test to be written) + assertNotNull(response); + firstResponseRef.compareAndSet(null, response); + assertEquals(firstResponseRef.get(), response); + }) + ); + } } - previous = current; - } + })); } public void testRuntimeFields() { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index 858c5ba946f78..ecfb2f81bf452 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -80,6 +80,8 @@ import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; @@ -91,8 +93,6 @@ import org.elasticsearch.xpack.ilm.IndexLifecycle; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.autoscaling.MlScalingReason; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.slm.SnapshotLifecycle; import org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry; import org.elasticsearch.xpack.transform.Transform; diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java index 5cf87cff66a25..9b3326a4ba348 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; import 
org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelDefinition; import org.elasticsearch.xpack.core.ml.inference.TrainedModelDefinitionTests; @@ -36,7 +37,6 @@ import org.elasticsearch.xpack.ml.extractor.DocValueField; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.modelsize.ModelSizeInfo; import org.elasticsearch.xpack.ml.inference.modelsize.ModelSizeInfoTests; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index f9c483496445e..f3254245168b8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -74,6 +74,7 @@ import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -188,6 +189,8 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider; import org.elasticsearch.xpack.core.ml.dataframe.stats.AnalysisStatsNamedWriteablesProvider; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; @@ -318,10 +321,8 @@ import org.elasticsearch.xpack.ml.dataframe.process.NativeMemoryUsageEstimationProcessFactory; import org.elasticsearch.xpack.ml.dataframe.process.results.AnalyticsResult; import org.elasticsearch.xpack.ml.dataframe.process.results.MemoryUsageEstimationResult; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.TrainedModelStatsService; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentClusterService; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; @@ -910,6 +911,7 @@ public Collection createComponents(PluginServices services) { Environment environment = services.environment(); NamedXContentRegistry xContentRegistry = services.xContentRegistry(); IndexNameExpressionResolver indexNameExpressionResolver = services.indexNameExpressionResolver(); + TelemetryProvider telemetryProvider = services.telemetryProvider(); 
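The `TelemetryProvider` fetched here feeds the new `MlMetrics` component introduced later in this patch: gauges registered against its `MeterRegistry` are polled by the telemetry (APM) plugin. A minimal sketch of the registration pattern, assuming a hypothetical `queueSize` value to expose; `registerLongGauge` and `LongWithAttributes` are the same APIs `MlMetrics` uses below:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.elasticsearch.telemetry.metric.LongWithAttributes;
import org.elasticsearch.telemetry.metric.MeterRegistry;

final class ExampleMetrics {
    // Registered gauges are kept so they can be closed on shutdown, as
    // MlMetrics#doClose does below.
    private final List<AutoCloseable> metrics = new ArrayList<>();
    private volatile long queueSize; // hypothetical cached value, updated elsewhere

    void register(MeterRegistry meterRegistry) {
        metrics.add(
            meterRegistry.registerLongGauge(
                "es.example.queue.size", // hypothetical metric name
                "Number of queued items on this node.",
                "items",
                // The supplier is polled by the telemetry plugin, so it only
                // reads a precomputed field instead of recalculating anything.
                () -> new LongWithAttributes(queueSize, Map.of())
            )
        );
    }
}
```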
if (enabled == false) { // Holders for @link(MachineLearningFeatureSetUsage) which needs access to job manager and ML extension, @@ -1051,7 +1053,7 @@ public Collection createComponents(PluginServices services) { normalizerProcessFactory = (jobId, quantilesState, bucketSpan, executorService) -> new MultiplyingNormalizerProcess(1.0); analyticsProcessFactory = (jobId, analyticsProcessConfig, hasState, executorService, onProcessCrash) -> null; memoryEstimationProcessFactory = (jobId, analyticsProcessConfig, hasState, executorService, onProcessCrash) -> null; - pyTorchProcessFactory = (task, executorService, onProcessCrash) -> new BlackHolePyTorchProcess(); + pyTorchProcessFactory = (task, executorService, afterInputStreamClose, onProcessCrash) -> new BlackHolePyTorchProcess(); } NormalizerFactory normalizerFactory = new NormalizerFactory( normalizerProcessFactory, @@ -1251,6 +1253,14 @@ public Collection createComponents(PluginServices services) { machineLearningExtension.get().isNlpEnabled() ); + MlMetrics mlMetrics = new MlMetrics( + telemetryProvider.getMeterRegistry(), + clusterService, + settings, + autodetectProcessManager, + dataFrameAnalyticsManager + ); + return List.of( mlLifeCycleService, new MlControllerHolder(mlController), @@ -1282,7 +1292,8 @@ public Collection createComponents(PluginServices services) { trainedModelAllocationClusterServiceSetOnce.get(), deploymentManager.get(), nodeAvailabilityZoneMapper, - new MachineLearningExtensionHolder(machineLearningExtension.get()) + new MachineLearningExtensionHolder(machineLearningExtension.get()), + mlMetrics ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java index c6a360a018e2a..976e5ec255b85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java @@ -15,9 +15,9 @@ import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.datafeed.DatafeedRunner; import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.process.MlController; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java new file mode 100644 index 0000000000000..f2cedd4bf0f6b --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java @@ -0,0 +1,563 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.telemetry.metric.LongWithAttributes; +import org.elasticsearch.telemetry.metric.MeterRegistry; +import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static org.elasticsearch.xpack.core.ml.MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT; +import static org.elasticsearch.xpack.core.ml.MlTasks.DATAFEED_TASK_NAME; +import static org.elasticsearch.xpack.core.ml.MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME; +import static org.elasticsearch.xpack.core.ml.MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME; +import static org.elasticsearch.xpack.core.ml.MlTasks.JOB_TASK_NAME; +import static org.elasticsearch.xpack.ml.MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD; + +/** + * This class adds two types of ML metrics to the meter registry, such that they can be collected by Elastic APM. + *

+ * 1. Per-node ML native memory statistics for ML nodes + * 2. Cluster-wide job/model statuses for master-eligible nodes + *

+ * The memory metrics relate solely to the ML node they are collected from. + *

+ * The job/model metrics are cluster-wide because a key problem we want to be able to detect is when there are + * jobs or models that are not assigned to any node. The consumer of the data needs to account for the fact that + * multiple master-eligible nodes are reporting the same information. The es.ml.is_master attribute in the records + * indicates which one was actually master, so can be used to deduplicate. + */ +public final class MlMetrics extends AbstractLifecycleComponent implements ClusterStateListener { + + private static final Logger logger = LogManager.getLogger(MlMetrics.class); + + private final MeterRegistry meterRegistry; + private final ClusterService clusterService; + private final AutodetectProcessManager autodetectProcessManager; + private final DataFrameAnalyticsManager dataFrameAnalyticsManager; + private final boolean hasMasterRole; + private final boolean hasMlRole; + private final List metrics = new ArrayList<>(); + + private static final Map MASTER_TRUE_MAP = Map.of("es.ml.is_master", Boolean.TRUE); + private static final Map MASTER_FALSE_MAP = Map.of("es.ml.is_master", Boolean.FALSE); + private volatile Map isMasterMap = MASTER_FALSE_MAP; + private volatile boolean firstTime = true; + + private volatile MlTaskStatusCounts mlTaskStatusCounts = MlTaskStatusCounts.EMPTY; + private volatile TrainedModelAllocationCounts trainedModelAllocationCounts = TrainedModelAllocationCounts.EMPTY; + + private volatile long nativeMemLimit; + private volatile long nativeMemAdUsage; + private volatile long nativeMemDfaUsage; + private volatile long nativeMemTrainedModelUsage; + private volatile long nativeMemFree; + + public MlMetrics( + MeterRegistry meterRegistry, + ClusterService clusterService, + Settings settings, + AutodetectProcessManager autodetectProcessManager, + DataFrameAnalyticsManager dataFrameAnalyticsManager + ) { + this.meterRegistry = meterRegistry; + this.clusterService = clusterService; + this.autodetectProcessManager = autodetectProcessManager; + this.dataFrameAnalyticsManager = dataFrameAnalyticsManager; + hasMasterRole = DiscoveryNode.hasRole(settings, DiscoveryNodeRole.MASTER_ROLE); + hasMlRole = DiscoveryNode.hasRole(settings, DiscoveryNodeRole.ML_ROLE); + if (hasMasterRole || hasMlRole) { + clusterService.addListener(this); + } + } + + private void registerMlNodeMetrics(MeterRegistry meterRegistry) { + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.limit", + "ML native memory limit on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemLimit, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.usage.anomaly_detectors", + "ML native memory used by anomaly detection jobs on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemAdUsage, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.usage.data_frame_analytics", + "ML native memory used by data frame analytics jobs on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemDfaUsage, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.usage.trained_models", + "ML native memory used by trained models on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemTrainedModelUsage, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.free", + "Free ML native memory on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemFree, Map.of()) + ) + ); + } + + private void 
registerMasterNodeMetrics(MeterRegistry meterRegistry) { + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.opening.count", + "Count of anomaly detection jobs in the opening state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adOpeningCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.opened.count", + "Count of anomaly detection jobs in the opened state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adOpenedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.closing.count", + "Count of anomaly detection jobs in the closing state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adClosingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.failed.count", + "Count of anomaly detection jobs in the failed state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adFailedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.datafeeds.starting.count", + "Count of datafeeds in the starting state cluster-wide.", + "datafeeds", + () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStartingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.datafeeds.started.count", + "Count of datafeeds in the started state cluster-wide.", + "datafeeds", + () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStartedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.datafeeds.stopping.count", + "Count of datafeeds in the stopping state cluster-wide.", + "datafeeds", + () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStoppingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.starting.count", + "Count of data frame analytics jobs in the starting state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaStartingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.started.count", + "Count of data frame analytics jobs in the started state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaStartedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.reindexing.count", + "Count of data frame analytics jobs in the reindexing state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaReindexingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.analyzing.count", + "Count of data frame analytics jobs in the analyzing state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaAnalyzingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.stopping.count", + "Count of data frame analytics jobs in the stopping state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaStoppingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.failed.count", + "Count of data frame analytics jobs in the failed state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaFailedCount, isMasterMap) + ) + ); + metrics.add( + 
meterRegistry.registerLongGauge( + "es.ml.trained_models.deployment.target_allocations.count", + "Sum of target trained model allocations across all deployments cluster-wide.", + "allocations", + () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsTargetAllocations, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.trained_models.deployment.current_allocations.count", + "Sum of current trained model allocations across all deployments cluster-wide.", + "allocations", + () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsCurrentAllocations, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.trained_models.deployment.failed_allocations.count", + "Sum of failed trained model allocations across all deployments cluster-wide.", + "allocations", + () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsFailedAllocations, isMasterMap) + ) + ); + } + + @Override + protected void doStart() { + metrics.clear(); + if (hasMasterRole) { + registerMasterNodeMetrics(meterRegistry); + } + if (hasMlRole) { + registerMlNodeMetrics(meterRegistry); + } + } + + @Override + protected void doStop() {} + + @Override + protected void doClose() { + metrics.forEach(metric -> { + try { + metric.close(); + } catch (Exception e) { + logger.warn("metrics close() method should not throw Exception", e); + } + }); + } + + /** + * Metric values are recalculated in response to cluster state changes and then cached. + * This means that the telemetry provider can poll the metrics registry as often as it + * likes without causing extra work in recalculating the metric values. + */ + @Override + public void clusterChanged(ClusterChangedEvent event) { + isMasterMap = event.localNodeMaster() ? MASTER_TRUE_MAP : MASTER_FALSE_MAP; + + if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + // Wait until the gateway has recovered from disk. + return; + } + + boolean mustRecalculateFreeMem = false; + + final ClusterState currentState = event.state(); + final ClusterState previousState = event.previousState(); + + if (firstTime || event.metadataChanged()) { + final PersistentTasksCustomMetadata tasks = currentState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE); + final PersistentTasksCustomMetadata oldTasks = firstTime + ? null + : previousState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE); + if (tasks != null && tasks.equals(oldTasks) == false) { + if (hasMasterRole) { + mlTaskStatusCounts = findTaskStatuses(tasks); + } + if (hasMlRole) { + nativeMemAdUsage = findAdMemoryUsage(autodetectProcessManager); + nativeMemDfaUsage = findDfaMemoryUsage(dataFrameAnalyticsManager, tasks); + mustRecalculateFreeMem = true; + } + } + } + + final TrainedModelAssignmentMetadata currentMetadata = TrainedModelAssignmentMetadata.fromState(currentState); + final TrainedModelAssignmentMetadata previousMetadata = firstTime ? 
null : TrainedModelAssignmentMetadata.fromState(previousState); + if (currentMetadata != null && currentMetadata.equals(previousMetadata) == false) { + if (hasMasterRole) { + trainedModelAllocationCounts = findTrainedModelAllocationCounts(currentMetadata); + } + if (hasMlRole) { + nativeMemTrainedModelUsage = findTrainedModelMemoryUsage(currentMetadata, currentState.nodes().getLocalNode().getId()); + mustRecalculateFreeMem = true; + } + } + + if (firstTime) { + firstTime = false; + nativeMemLimit = findNativeMemoryLimit(currentState.nodes().getLocalNode(), clusterService.getClusterSettings()); + mustRecalculateFreeMem = true; + // Install a listener to recalculate limit and free in response to settings changes. + // This isn't done in the constructor, but instead only after the three usage variables + // have been populated. Doing this means that immediately after startup, when the stats + // are inaccurate, they'll _all_ be zero. Installing the settings listeners immediately + // could mean that free would be misleadingly set based on zero usage when actual usage + // is _not_ zero. + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(USE_AUTO_MACHINE_MEMORY_PERCENT, s -> memoryLimitClusterSettingUpdated()); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, s -> memoryLimitClusterSettingUpdated()); + } + + if (mustRecalculateFreeMem) { + nativeMemFree = findNativeMemoryFree(nativeMemLimit, nativeMemAdUsage, nativeMemDfaUsage, nativeMemTrainedModelUsage); + } + } + + /** + * This method is registered to be called whenever a cluster setting is changed that affects + * any of the calculations this class performs. + */ + private void memoryLimitClusterSettingUpdated() { + nativeMemLimit = findNativeMemoryLimit(clusterService.localNode(), clusterService.getClusterSettings()); + nativeMemFree = findNativeMemoryFree(nativeMemLimit, nativeMemAdUsage, nativeMemDfaUsage, nativeMemTrainedModelUsage); + } + + /** + * Returns up-to-date stats about the states of the ML entities that are persistent tasks. + * Currently this includes: + * - Anomaly detection jobs + * - Datafeeds + * - Data frame analytics jobs + *

+ * In the future it could possibly also include model snapshot upgrade tasks. + *

+ * These stats relate to the whole cluster and not just the current node. + *

+ * The caller is expected to cache the returned stats to avoid unnecessary recalculation. + */ + static MlTaskStatusCounts findTaskStatuses(PersistentTasksCustomMetadata tasks) { + + int adOpeningCount = 0; + int adOpenedCount = 0; + int adClosingCount = 0; + int adFailedCount = 0; + int datafeedStartingCount = 0; + int datafeedStartedCount = 0; + int datafeedStoppingCount = 0; + int dfaStartingCount = 0; + int dfaStartedCount = 0; + int dfaReindexingCount = 0; + int dfaAnalyzingCount = 0; + int dfaStoppingCount = 0; + int dfaFailedCount = 0; + + for (PersistentTasksCustomMetadata.PersistentTask task : tasks.tasks()) { + switch (task.getTaskName()) { + case JOB_TASK_NAME: + switch (MlTasks.getJobStateModifiedForReassignments(task)) { + case OPENING -> ++adOpeningCount; + case OPENED -> ++adOpenedCount; + case CLOSING -> ++adClosingCount; + case FAILED -> ++adFailedCount; + } + break; + case DATAFEED_TASK_NAME: + switch (MlTasks.getDatafeedState(task)) { + case STARTING -> ++datafeedStartingCount; + case STARTED -> ++datafeedStartedCount; + case STOPPING -> ++datafeedStoppingCount; + } + break; + case DATA_FRAME_ANALYTICS_TASK_NAME: + switch (MlTasks.getDataFrameAnalyticsState(task)) { + case STARTING -> ++dfaStartingCount; + case STARTED -> ++dfaStartedCount; + case REINDEXING -> ++dfaReindexingCount; + case ANALYZING -> ++dfaAnalyzingCount; + case STOPPING -> ++dfaStoppingCount; + case FAILED -> ++dfaFailedCount; + } + break; + case JOB_SNAPSHOT_UPGRADE_TASK_NAME: + // Not currently tracked + // TODO: consider in the future, especially when we're at the stage of needing to upgrade serverless model snapshots + break; + } + } + + return new MlTaskStatusCounts( + adOpeningCount, + adOpenedCount, + adClosingCount, + adFailedCount, + datafeedStartingCount, + datafeedStartedCount, + datafeedStoppingCount, + dfaStartingCount, + dfaStartedCount, + dfaReindexingCount, + dfaAnalyzingCount, + dfaStoppingCount, + dfaFailedCount + ); + } + + /** + * Return the memory usage, in bytes, of the anomaly detection jobs that are running on the + * current node. + */ + static long findAdMemoryUsage(AutodetectProcessManager autodetectProcessManager) { + return autodetectProcessManager.getOpenProcessMemoryUsage().getBytes(); + } + + /** + * Return the memory usage, in bytes, of the data frame analytics jobs that are running on the + * current node. + */ + static long findDfaMemoryUsage(DataFrameAnalyticsManager dataFrameAnalyticsManager, PersistentTasksCustomMetadata tasks) { + return dataFrameAnalyticsManager.getActiveTaskMemoryUsage(tasks).getBytes(); + } + + /** + * Returns up-to-date stats about the numbers of allocations of ML trained models. + *

+ * These stats relate to the whole cluster and not just the current node. + *

+ * The caller is expected to cache the returned stats to avoid unnecessary recalculation. + */ + static TrainedModelAllocationCounts findTrainedModelAllocationCounts(TrainedModelAssignmentMetadata metadata) { + int trainedModelsTargetAllocations = 0; + int trainedModelsCurrentAllocations = 0; + int trainedModelsFailedAllocations = 0; + + for (TrainedModelAssignment trainedModelAssignment : metadata.allAssignments().values()) { + trainedModelsTargetAllocations += trainedModelAssignment.totalTargetAllocations(); + trainedModelsCurrentAllocations += trainedModelAssignment.totalCurrentAllocations(); + trainedModelsFailedAllocations += trainedModelAssignment.totalFailedAllocations(); + } + + return new TrainedModelAllocationCounts( + trainedModelsTargetAllocations, + trainedModelsCurrentAllocations, + trainedModelsFailedAllocations + ); + } + + /** + * Return the memory usage, in bytes, of the trained models that are running on the + * current node. + */ + static long findTrainedModelMemoryUsage(TrainedModelAssignmentMetadata metadata, String localNodeId) { + long trainedModelMemoryUsageBytes = 0; + for (TrainedModelAssignment assignment : metadata.allAssignments().values()) { + if (Optional.ofNullable(assignment.getNodeRoutingTable().get(localNodeId)) + .map(RoutingInfo::getState) + .orElse(RoutingState.STOPPED) + .consumesMemory()) { + trainedModelMemoryUsageBytes += assignment.getTaskParams().estimateMemoryUsageBytes(); + } + } + return trainedModelMemoryUsageBytes; + } + + /** + * Return the maximum amount of memory, in bytes, permitted for ML processes running on the + * current node. + */ + static long findNativeMemoryLimit(DiscoveryNode localNode, ClusterSettings settings) { + return NativeMemoryCalculator.allowedBytesForMl(localNode, settings).orElse(0L); + } + + /** + * Return the amount of free memory, in bytes, that remains available for ML processes running on the + * current node. 
+ */ + static long findNativeMemoryFree(long nativeMemLimit, long nativeMemAdUsage, long nativeMemDfaUsage, long nativeMemTrainedModelUsage) { + long totalUsage = nativeMemAdUsage + nativeMemDfaUsage + nativeMemTrainedModelUsage; + if (totalUsage > 0) { + totalUsage += NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + } + return nativeMemLimit - totalUsage; + } + + record MlTaskStatusCounts( + int adOpeningCount, + int adOpenedCount, + int adClosingCount, + int adFailedCount, + int datafeedStartingCount, + int datafeedStartedCount, + int datafeedStoppingCount, + int dfaStartingCount, + int dfaStartedCount, + int dfaReindexingCount, + int dfaAnalyzingCount, + int dfaStoppingCount, + int dfaFailedCount + ) { + static final MlTaskStatusCounts EMPTY = new MlTaskStatusCounts(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + } + + record TrainedModelAllocationCounts( + int trainedModelsTargetAllocations, + int trainedModelsCurrentAllocations, + int trainedModelsFailedAllocations + ) { + static final TrainedModelAllocationCounts EMPTY = new TrainedModelAllocationCounts(0, 0, 0); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java index 44235882a6582..5ecd0322674e1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.xpack.core.ml.action.ClearDeploymentCacheAction.Request; import org.elasticsearch.xpack.core.ml.action.ClearDeploymentCacheAction.Response; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import java.util.List; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java index 7442f1db0a662..9c368c1a162a8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -28,9 +28,9 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils; import java.util.ArrayList; import java.util.function.Supplier; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java index 
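Looking back at `findNativeMemoryFree` above: the fixed native-code overhead is charged once, and only when at least one native process is running, so an idle ML node reports free memory equal to its limit. A worked example with assumed inputs (the 30 MiB value of `NATIVE_EXECUTABLE_CODE_OVERHEAD` is an assumption here, not part of this patch):

```java
long limit = 2_147_483_648L;    // 2 GiB allowed for ML on this node
long adUsage = 536_870_912L;    // 512 MiB of open anomaly detection jobs
long dfaUsage = 0L;             // no data frame analytics jobs running
long modelUsage = 268_435_456L; // 256 MiB of deployed trained models

long totalUsage = adUsage + dfaUsage + modelUsage; // 805,306,368 bytes
if (totalUsage > 0) {
    totalUsage += 31_457_280L;  // assumed 30 MiB overhead, charged exactly once
}
long free = limit - totalUsage; // 2,147,483,648 - 836,763,648 = 1,310,720,000 bytes
```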
093e4213a5db1..49f73056cd8bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java @@ -41,9 +41,9 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.action.DeleteTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java index 73601ef86ff13..fe8a4ff029d69 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java @@ -32,7 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteTrainedModelAliasAction; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import java.util.HashMap; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java index 36d225a943348..14afd6999b0c0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.ModelStats; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java index e6d1fe30d7646..78d030d454f0b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java @@ -21,11 
+21,11 @@ import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Request; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Response; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import java.util.Collections; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index 3c9ba3700dc8e..76321608ba4fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -43,16 +43,16 @@ import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModelSizeStats; import org.elasticsearch.xpack.core.ml.utils.TransportVersionUtils; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index 3cf0189c28df2..6a8dca8e2776b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -32,15 +32,15 @@ import org.elasticsearch.xpack.core.ml.action.InferModelAction.Request; import org.elasticsearch.xpack.core.ml.action.InferModelAction.Response; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import 
org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 75eb85fbce701..5206799735c52 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -60,8 +60,10 @@ import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction.Request; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction.Response; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedInferenceConfig; @@ -72,8 +74,6 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.utils.TaskRetriever; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java index de760d8fa17ed..79560b8b8e94e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java @@ -36,14 +36,14 @@ import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAliasAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import 
org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java index 2cb8fc847bb62..7c52e086ec43c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.core.action.SetResetModeActionRequest; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.SetResetModeAction; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; public class TransportSetResetModeAction extends AbstractTransportSetResetModeAction { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 4a569b374582a..ecfe4c8aac6c6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -56,13 +56,13 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.IndexLocation; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.TransportVersionUtils; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java index 6e90d097d1e9f..5b2c3fdeddf43 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java 
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java @@ -35,10 +35,10 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentClusterService; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java index 57d0084065fa5..cca59f27d5c76 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java @@ -17,11 +17,11 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskParams; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import java.util.Collection; import java.util.List; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java index 5605a80a7454c..44cf1188b09a2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderContext; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.utils.MlProcessors; import java.time.Instant; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index 829101b3bd551..223154737df3f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -20,12 +20,16 @@ import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.MlStatsIndex; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -45,7 +49,10 @@ import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; import org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; +import java.util.Map; import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.core.Strings.format; @@ -72,6 +79,8 @@ public class DataFrameAnalyticsManager { /** Indicates whether the node is shutting down. */ private final AtomicBoolean nodeShuttingDown = new AtomicBoolean(); + private final Map<String, ByteSizeValue> memoryLimitById; + public DataFrameAnalyticsManager( Settings settings, NodeClient client, @@ -84,6 +93,37 @@ public DataFrameAnalyticsManager( ResultsPersisterService resultsPersisterService, ModelLoadingService modelLoadingService, String[] destIndexAllowedSettings + ) { + this( + settings, + client, + threadPool, + clusterService, + configProvider, + processManager, + auditor, + expressionResolver, + resultsPersisterService, + modelLoadingService, + destIndexAllowedSettings, + new ConcurrentHashMap<>() + ); + } + + // For testing only + public DataFrameAnalyticsManager( + Settings settings, + NodeClient client, + ThreadPool threadPool, + ClusterService clusterService, + DataFrameAnalyticsConfigProvider configProvider, + AnalyticsProcessManager processManager, + DataFrameAnalyticsAuditor auditor, + IndexNameExpressionResolver expressionResolver, + ResultsPersisterService resultsPersisterService, + ModelLoadingService modelLoadingService, + String[] destIndexAllowedSettings, + Map<String, ByteSizeValue> memoryLimitById ) { this.settings = Objects.requireNonNull(settings); this.client = Objects.requireNonNull(client); @@ -96,11 +136,13 @@ public DataFrameAnalyticsManager( this.resultsPersisterService = Objects.requireNonNull(resultsPersisterService); this.modelLoadingService = Objects.requireNonNull(modelLoadingService); this.destIndexAllowedSettings = Objects.requireNonNull(destIndexAllowedSettings); + this.memoryLimitById = Objects.requireNonNull(memoryLimitById); } public void execute(DataFrameAnalyticsTask task, ClusterState clusterState, TimeValue masterNodeTimeout) { // With config in hand, determine action to take ActionListener<DataFrameAnalyticsConfig> configListener = ActionListener.wrap(config -> { + memoryLimitById.put(config.getId(), config.getModelMemoryLimit()); // Check if existing destination index is incompatible. // If it is, we delete it and start from reindexing.
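+ // (Descriptive note on the tracking added just above: the recorded model memory limit is removed again in the FINAL step and, while the task still consumes memory, is summed by getActiveTaskMemoryUsage() further down in this file.)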
IndexMetadata destIndex = clusterState.getMetadata().index(config.getDest().getIndex()); @@ -224,6 +266,7 @@ private void executeStep(DataFrameAnalyticsTask task, DataFrameAnalyticsConfig c case FINAL -> { LOGGER.info("[{}] Marking task completed", config.getId()); task.markAsCompleted(); + memoryLimitById.remove(config.getId()); } default -> task.markAsFailed(ExceptionsHelper.serverError("Unknown step [{}]", step)); } @@ -291,4 +334,34 @@ public boolean isNodeShuttingDown() { public void markNodeAsShuttingDown() { nodeShuttingDown.set(true); } + + /** + * Get the memory limit for a data frame analytics job if known. + * The memory limit will only be known if it is running on the + * current node, or has been very recently. + * @param id Data frame analytics job ID. + * @return The {@link ByteSizeValue} representing the memory limit, if known, otherwise {@link Optional#empty}. + */ + public Optional<ByteSizeValue> getMemoryLimitIfKnown(String id) { + return Optional.ofNullable(memoryLimitById.get(id)); + } + + /** + * Finds the memory used by data frame analytics jobs that are active on the current node. + * This includes jobs that are in the reindexing state, even though they don't have a running + * process, because we want to ensure that when they get as far as needing to run a process + * there'll be space for it. + * @param tasks Persistent tasks metadata. + * @return Memory used by data frame analytics jobs that are active on the current node. + */ + public ByteSizeValue getActiveTaskMemoryUsage(PersistentTasksCustomMetadata tasks) { + long memoryUsedBytes = 0; + for (Map.Entry<String, ByteSizeValue> entry : memoryLimitById.entrySet()) { + DataFrameAnalyticsState state = MlTasks.getDataFrameAnalyticsState(entry.getKey(), tasks); + if (state.consumesMemory()) { + memoryUsedBytes += entry.getValue().getBytes() + DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes(); + } + } + return ByteSizeValue.ofBytes(memoryUsedBytes); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index a1ac1aa55c320..471615e8bbd6a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -44,6 +44,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; @@ -67,8 +68,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils.NODES_CHANGED_REASON; -import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils.createShuttingDownRoute; +import static org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils.NODES_CHANGED_REASON; +import static
org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils.createShuttingDownRoute; public class TrainedModelAssignmentClusterService implements ClusterStateListener { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java index fdb007862cfdc..3fac7c387b12e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingStateAndReason; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -66,8 +67,8 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ml.MlTasks.TRAINED_MODEL_ASSIGNMENT_TASK_ACTION; import static org.elasticsearch.xpack.core.ml.MlTasks.TRAINED_MODEL_ASSIGNMENT_TASK_TYPE; +import static org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils.NODE_IS_SHUTTING_DOWN; import static org.elasticsearch.xpack.ml.MachineLearning.ML_PYTORCH_MODEL_INFERENCE_FEATURE; -import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils.NODE_IS_SHUTTING_DOWN; public class TrainedModelAssignmentNodeService implements ClusterStateListener { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java index 6e6b447fcea3d..a1142796558f4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlan; import org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlanner; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java index 1a5b5481704a4..0609e0e6ff916 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelAssignmentRoutingInfoAction; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import java.util.Objects; import java.util.function.Predicate; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index f48e67f377817..ef5de2718e702 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -496,7 +496,14 @@ synchronized void startAndLoad(TrainedModelLocation modelLocation, ActionListene } logger.debug("[{}] start and load", task.getDeploymentId()); - process.set(pyTorchProcessFactory.createProcess(task, executorServiceForProcess, this::onProcessCrash)); + process.set( + pyTorchProcessFactory.createProcess( + task, + executorServiceForProcess, + () -> resultProcessor.awaitCompletion(COMPLETION_TIMEOUT.getMinutes(), TimeUnit.MINUTES), + this::onProcessCrash + ) + ); startTime = Instant.now(); logger.debug("[{}] process started", task.getDeploymentId()); try { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index e9b7a1a3e137b..5994c61f46297 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -37,6 +37,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; @@ -46,7 +47,6 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.inference.InferenceDefinition; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.TrainedModelStatsService; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index 2be4fe12884b0..c903933a8e350 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -72,6 +72,7 @@ import org.elasticsearch.xpack.core.ml.MlStatsIndex; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.InferenceToXContentCompressor; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; @@ -85,7 +86,6 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary; import java.io.IOException; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java index 5908c550d318f..d2e5369ef4bd3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java @@ -20,6 +20,7 @@ import java.nio.file.Path; import java.util.Iterator; import java.util.List; +import java.util.concurrent.TimeoutException; import java.util.function.Consumer; public class NativePyTorchProcess extends AbstractNativeProcess implements PyTorchProcess { @@ -27,6 +28,7 @@ public class NativePyTorchProcess extends AbstractNativeProcess implements PyTor private static final String NAME = "pytorch_inference"; private final ProcessResultsParser<PyTorchResult> resultsParser; + private final PyTorchProcessFactory.TimeoutRunnable afterInStreamClose; protected NativePyTorchProcess( String jobId, @@ -34,9 +36,11 @@ protected NativePyTorchProcess( ProcessPipes processPipes, int numberOfFields, List<Path> filesToDelete, + PyTorchProcessFactory.TimeoutRunnable afterInStreamClose, Consumer<String> onProcessCrash ) { super(jobId, nativeController, processPipes, numberOfFields, filesToDelete, onProcessCrash); + this.afterInStreamClose = afterInStreamClose; this.resultsParser = new ProcessResultsParser<>(PyTorchResult.PARSER, NamedXContentRegistry.EMPTY); } @@ -71,4 +75,9 @@ public void writeInferenceRequest(BytesReference jsonRequest) throws IOException processInStream().write('\n'); processInStream().flush(); } + + @Override + protected void afterProcessInStreamClose() throws TimeoutException { + afterInStreamClose.run(); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java index 4585ca29e8d14..b26c6720ed179 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java @@ -56,6 +56,7 @@ void setProcessConnectTimeout(TimeValue processConnectTimeout) { public NativePyTorchProcess createProcess( TrainedModelDeploymentTask task, ExecutorService executorService, + TimeoutRunnable
afterInStreamClose, Consumer<String> onProcessCrash ) { ProcessPipes processPipes = new ProcessPipes( @@ -80,6 +81,7 @@ public NativePyTorchProcess createProcess( processPipes, 0, Collections.emptyList(), + afterInStreamClose, onProcessCrash ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java index 07d9e8faa22ea..507c6115a392d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java @@ -10,9 +10,19 @@ import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeoutException; import java.util.function.Consumer; public interface PyTorchProcessFactory { - PyTorchProcess createProcess(TrainedModelDeploymentTask task, ExecutorService executorService, Consumer<String> onProcessCrash); + interface TimeoutRunnable { + void run() throws TimeoutException; + } + + PyTorchProcess createProcess( + TrainedModelDeploymentTask task, + ExecutorService executorService, + TimeoutRunnable afterInStreamClose, + Consumer<String> onProcessCrash + ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java index 548c95d1ddd50..f2bf180943b82 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java @@ -16,10 +16,10 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.utils.MemoryTrackedTaskState; import org.elasticsearch.xpack.core.ml.utils.MlTaskParams; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 8deac327c065e..658db2997485d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -1062,4 +1062,24 @@ public void clusterChanged(ClusterChangedEvent event) { resetInProgress = MlMetadata.getMlMetadata(event.state()).isResetMode(); } + /** + * Finds the memory used by open autodetect processes on the current node. + * @return Memory used by open autodetect processes on the current node.
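+ * Per process this is the model memory limit, the current model size, or the peak model size, depending on the assignment memory basis the process reports, plus a fixed per-process overhead (see the switch below).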
+ */ + public ByteSizeValue getOpenProcessMemoryUsage() { + long memoryUsedBytes = 0; + for (ProcessContext processContext : processByAllocation.values()) { + if (processContext.getState() == ProcessContext.ProcessStateName.RUNNING) { + ModelSizeStats modelSizeStats = processContext.getAutodetectCommunicator().getModelSizeStats(); + ModelSizeStats.AssignmentMemoryBasis basis = modelSizeStats.getAssignmentMemoryBasis(); + memoryUsedBytes += switch (basis != null ? basis : ModelSizeStats.AssignmentMemoryBasis.MODEL_MEMORY_LIMIT) { + case MODEL_MEMORY_LIMIT -> modelSizeStats.getModelBytesMemoryLimit(); + case CURRENT_MODEL_BYTES -> modelSizeStats.getModelBytes(); + case PEAK_MODEL_BYTES -> Optional.ofNullable(modelSizeStats.getPeakModelBytes()).orElse(modelSizeStats.getModelBytes()); + }; + memoryUsedBytes += Job.PROCESS_MEMORY_OVERHEAD.getBytes(); + } + } + return ByteSizeValue.ofBytes(memoryUsedBytes); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java index 8ea85208a2de8..dd71800bd4f90 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java @@ -223,7 +223,7 @@ public void close() throws IOException { * Implementations can override this if they need to perform extra processing * immediately after the native process's input stream is closed. */ - protected void afterProcessInStreamClose() { + protected void afterProcessInStreamClose() throws TimeoutException { // no-op by default } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 5ba577eb90ab7..3f502c4d95cc9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -26,12 +26,12 @@ import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskParams; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java index a7a9122c96606..2b206de4cf42f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import 
org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeState; @@ -35,7 +36,6 @@ import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskState; import org.elasticsearch.xpack.ml.datafeed.DatafeedRunner; import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.process.MlController; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetricsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetricsTests.java new file mode 100644 index 0000000000000..2262c21070e75 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetricsTests.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.autoscaling.MlMemoryAutoscalingDeciderTests; +import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; +import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; +import org.elasticsearch.xpack.ml.dataframe.process.AnalyticsProcessManager; +import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; +import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; +import org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; + +import java.util.Map; + +import static org.elasticsearch.xpack.ml.MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD; +import 
static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class MlMetricsTests extends ESTestCase { + + public void testFindTaskStatuses() { + + PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder(); + MlMemoryAutoscalingDeciderTests.addJobTask("job1", "node1", JobState.OPENED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job2", "node1", JobState.OPENED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job3", "node2", JobState.FAILED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job4", null, JobState.OPENING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job5", "node1", JobState.CLOSING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job6", "node2", JobState.OPENED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job7", "node2", JobState.OPENING, tasksBuilder); + addDatafeedTask("datafeed1", "node1", DatafeedState.STARTED, tasksBuilder); + addDatafeedTask("datafeed2", "node1", DatafeedState.STARTED, tasksBuilder); + addDatafeedTask("datafeed5", "node1", DatafeedState.STOPPING, tasksBuilder); + addDatafeedTask("datafeed6", "node2", DatafeedState.STARTED, tasksBuilder); + addDatafeedTask("datafeed7", "node2", DatafeedState.STARTING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa1", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa2", "node2", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa3", "node1", DataFrameAnalyticsState.FAILED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa4", "node2", DataFrameAnalyticsState.REINDEXING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa5", null, DataFrameAnalyticsState.STARTING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa6", "node1", DataFrameAnalyticsState.ANALYZING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa7", "node1", DataFrameAnalyticsState.STOPPING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa8", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa9", null, DataFrameAnalyticsState.FAILED, tasksBuilder); + + MlMetrics.MlTaskStatusCounts counts = MlMetrics.findTaskStatuses(tasksBuilder.build()); + assertThat(counts.adOpeningCount(), is(2)); + assertThat(counts.adOpenedCount(), is(3)); + assertThat(counts.adClosingCount(), is(1)); + assertThat(counts.adFailedCount(), is(1)); + assertThat(counts.datafeedStartingCount(), is(1)); + assertThat(counts.datafeedStartedCount(), is(3)); + assertThat(counts.datafeedStoppingCount(), is(1)); + assertThat(counts.dfaStartingCount(), is(1)); + assertThat(counts.dfaStartedCount(), is(3)); + assertThat(counts.dfaReindexingCount(), is(1)); + assertThat(counts.dfaAnalyzingCount(), is(1)); + assertThat(counts.dfaStoppingCount(), is(1)); + assertThat(counts.dfaFailedCount(), is(2)); + } + + public void testFindDfaMemoryUsage() { + + PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder(); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa1", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa2", "node2", DataFrameAnalyticsState.STARTED, tasksBuilder); + 
MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa3", "node1", DataFrameAnalyticsState.FAILED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa4", "node2", DataFrameAnalyticsState.REINDEXING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa5", null, DataFrameAnalyticsState.STARTING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa6", "node1", DataFrameAnalyticsState.ANALYZING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa7", "node1", DataFrameAnalyticsState.STOPPING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa8", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa9", null, DataFrameAnalyticsState.FAILED, tasksBuilder); + + DataFrameAnalyticsManager dfaManager = new DataFrameAnalyticsManager( + Settings.EMPTY, + mock(NodeClient.class), + mock(ThreadPool.class), + mock(ClusterService.class), + mock(DataFrameAnalyticsConfigProvider.class), + mock(AnalyticsProcessManager.class), + mock(DataFrameAnalyticsAuditor.class), + mock(IndexNameExpressionResolver.class), + mock(ResultsPersisterService.class), + mock(ModelLoadingService.class), + new String[] {}, + Map.of( + "dfa1", + ByteSizeValue.ofGb(1), + "dfa3", + ByteSizeValue.ofGb(2), + "dfa6", + ByteSizeValue.ofGb(4), + "dfa7", + ByteSizeValue.ofGb(8), + "dfa8", + ByteSizeValue.ofGb(16) + ) + ); + + long bytesUsed = MlMetrics.findDfaMemoryUsage(dfaManager, tasksBuilder.build()); + assertThat(bytesUsed, is(ByteSizeValue.ofGb(29).getBytes() + 4 * DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes())); + } + + public void testFindTrainedModelAllocationCounts() { + + TrainedModelAssignmentMetadata.Builder metadataBuilder = TrainedModelAssignmentMetadata.Builder.empty(); + metadataBuilder.addNewAssignment( + "model1", + TrainedModelAssignment.Builder.empty(mock(StartTrainedModelDeploymentAction.TaskParams.class)) + .addRoutingEntry("node1", new RoutingInfo(1, 1, RoutingState.STARTED, "")) + .addRoutingEntry("node2", new RoutingInfo(0, 1, RoutingState.FAILED, "")) + ); + metadataBuilder.addNewAssignment( + "model2", + TrainedModelAssignment.Builder.empty(mock(StartTrainedModelDeploymentAction.TaskParams.class)) + .addRoutingEntry("node1", new RoutingInfo(2, 2, RoutingState.STARTED, "")) + ); + metadataBuilder.addNewAssignment( + "model3", + TrainedModelAssignment.Builder.empty(mock(StartTrainedModelDeploymentAction.TaskParams.class)) + .addRoutingEntry("node2", new RoutingInfo(0, 1, RoutingState.STARTING, "")) + ); + + MlMetrics.TrainedModelAllocationCounts counts = MlMetrics.findTrainedModelAllocationCounts(metadataBuilder.build()); + assertThat(counts.trainedModelsTargetAllocations(), is(5)); + assertThat(counts.trainedModelsCurrentAllocations(), is(3)); + assertThat(counts.trainedModelsFailedAllocations(), is(1)); + } + + public void testFindNativeMemoryFree() { + + long bytesFree = MlMetrics.findNativeMemoryFree( + ByteSizeValue.ofMb(4000).getBytes(), + ByteSizeValue.ofMb(500).getBytes(), + ByteSizeValue.ofMb(1000).getBytes(), + ByteSizeValue.ofMb(2000).getBytes() + ); + assertThat(bytesFree, is(ByteSizeValue.ofMb(500).getBytes() - NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes())); + } + + public static void addDatafeedTask( + String datafeedId, + String nodeId, + DatafeedState datafeedState, + PersistentTasksCustomMetadata.Builder builder + ) { + builder.addTask( + MlTasks.datafeedTaskId(datafeedId), + MlTasks.DATAFEED_TASK_NAME, + new 
StartDatafeedAction.DatafeedParams(datafeedId, System.currentTimeMillis()), + nodeId == null ? AWAITING_LAZY_ASSIGNMENT : new PersistentTasksCustomMetadata.Assignment(nodeId, "test assignment") + ); + if (datafeedState != null) { + builder.updateTaskState(MlTasks.datafeedTaskId(datafeedId), datafeedState); + } + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java index c54ac8ba3b84d..bf6d13ada0f94 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MachineLearningField; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.junit.Before; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java index a56ad515690cf..97fd66e284010 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.junit.Before; import java.util.Map; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java index f966ac85c7a65..f08d2735be8a5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingStateAndReason; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.autoscaling.NodeAvailabilityZoneMapper; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java index 3057da83d11e9..6c5223eae4d99 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentTests; import java.io.IOException; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java index 795f184a49a4d..2444134ce2920 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import org.junit.After; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java index 334fdfbb8b922..53b737b38c284 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.NodeLoad; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java index 2f4640cfa38dc..40b0dd519f7d8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.xcontent.XContentFactory; 
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; @@ -46,7 +47,6 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.inference.InferenceDefinition; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.TrainedModelStatsService; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java index ccc7f14d2264e..fef9b07429702 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java @@ -21,9 +21,9 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutorTests; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.junit.Before; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index aff3f006b1a8a..7a314b82024be 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -53,6 +54,7 @@ import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats.AssignmentMemoryBasis; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; 
import org.elasticsearch.xpack.ml.MachineLearning; @@ -815,6 +817,35 @@ public void testCreate_givenNonZeroCountsAndNoModelSnapshotNorQuantiles() { verifyNoMoreInteractions(auditor); } + public void testGetOpenProcessMemoryUsage() { + modelSnapshot = null; + quantiles = null; + dataCounts = new DataCounts("foo"); + dataCounts.setLatestRecordTimeStamp(new Date(0L)); + dataCounts.incrementProcessedRecordCount(42L); + long modelMemoryLimitBytes = ByteSizeValue.ofMb(randomIntBetween(10, 1000)).getBytes(); + long peakModelBytes = randomLongBetween(100000, modelMemoryLimitBytes - 1); + long modelBytes = randomLongBetween(1, peakModelBytes - 1); + AssignmentMemoryBasis assignmentMemoryBasis = randomFrom(AssignmentMemoryBasis.values()); + modelSizeStats = new ModelSizeStats.Builder("foo").setModelBytesMemoryLimit(modelMemoryLimitBytes) + .setPeakModelBytes(peakModelBytes) + .setModelBytes(modelBytes) + .setAssignmentMemoryBasis(assignmentMemoryBasis) + .build(); + when(autodetectCommunicator.getModelSizeStats()).thenReturn(modelSizeStats); + AutodetectProcessManager manager = createSpyManager(); + JobTask jobTask = mock(JobTask.class); + when(jobTask.getJobId()).thenReturn("foo"); + manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + + long expectedSizeBytes = Job.PROCESS_MEMORY_OVERHEAD.getBytes() + switch (assignmentMemoryBasis) { + case MODEL_MEMORY_LIMIT -> modelMemoryLimitBytes; + case CURRENT_MODEL_BYTES -> modelBytes; + case PEAK_MODEL_BYTES -> peakModelBytes; + }; + assertThat(manager.getOpenProcessMemoryUsage(), equalTo(ByteSizeValue.ofBytes(expectedSizeBytes))); + } + private AutodetectProcessManager createNonSpyManager(String jobId) { ExecutorService executorService = mock(ExecutorService.class); when(threadPool.executor(anyString())).thenReturn(executorService); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index a7c981739098b..9b5eaeb4aa6fd 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -221,6 +221,9 @@ private void searchGenericEvents( client.prepareSearch(request.indices()) .setTrackTotalHits(false) .setSize(0) + // take advantage of request cache and keep a consistent order for the same request + .setRequestCache(true) + .setPreference(String.valueOf(request.hashCode())) .setQuery(request.getQuery()) .addAggregation(new MinAggregationBuilder("min_time").field("@timestamp")) .addAggregation(new MaxAggregationBuilder("max_time").field("@timestamp")) @@ -269,6 +272,9 @@ private void searchEventGroupedByStackTrace( client.prepareSearch(eventsIndex.getName()) .setTrackTotalHits(false) .setSize(0) + // take advantage of request cache and keep a consistent order for the same request + .setRequestCache(true) + .setPreference(String.valueOf(request.hashCode())) .setQuery(request.getQuery()) .addAggregation(new MinAggregationBuilder("min_time").field("@timestamp")) .addAggregation(new MaxAggregationBuilder("max_time").field("@timestamp")) diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java index cb13cfd651ed3..2ccdd66089c79 100644 --- 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java @@ -67,6 +67,10 @@ public Batch(String name, Rule... rules) { public String name() { return name; } + + public Rule[] rules() { + return rules; + } } private Iterable> batches = null; diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 7ddd660645a7c..a3b5147988b13 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -10,6 +10,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; +import java.util.function.Function; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; @@ -119,6 +120,16 @@ public List expectedWarnings(boolean forEmulated) { } return warnings; } + + /** + * Modifies the expected warnings. + * In some cases, we modify the query to run against multiple clusters. As a result, the line/column positions + * of the expected warnings no longer match the actual warnings. To enable reuse of spec tests, this method + * allows adjusting the expected warnings. + */ + public void adjustExpectedWarnings(Function updater) { + expectedWarnings.replaceAll(updater::apply); + } } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java index c80cf3c3d62e3..e3b631ba69c8a 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java @@ -143,23 +143,18 @@ public void testRetryPointInTime() throws Exception { ).keepAlive(TimeValue.timeValueMinutes(2)); final String pitId = client().execute(TransportOpenPointInTimeAction.TYPE, openRequest).actionGet().getPointInTimeId(); try { - assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName).setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), - resp -> { - assertThat(resp.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp, docCount); - } - ); + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp, docCount); + }); final Set allocatedNodes = internalCluster().nodesInclude(indexName); for (String allocatedNode : allocatedNodes) { internalCluster().restartNode(allocatedNode); } ensureGreen(indexName); assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName) - .setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) + prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) .setPreFilterShardSize(between(1, 10)) .setAllowPartialSearchResults(true) .setPointInTime(new PointInTimeBuilder(pitId)), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 461e044e732d1..1d849055c70a5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -375,6 +375,7 @@ import java.io.IOException; import java.net.InetSocketAddress; +import java.security.Provider; import java.time.Clock; import java.util.ArrayList; import java.util.Arrays; @@ -1178,6 +1179,7 @@ public static List> getSettings(List securityExten // The following just apply in node mode settingsList.add(XPackSettings.FIPS_MODE_ENABLED); + settingsList.add(XPackSettings.FIPS_REQUIRED_PROVIDERS); SSLService.registerSettings(settingsList); // IP Filter settings @@ -1561,6 +1563,30 @@ static void validateForFips(Settings settings) { } }); + Set foundProviders = new HashSet<>(); + for (Provider provider : java.security.Security.getProviders()) { + foundProviders.add(provider.getName().toLowerCase(Locale.ROOT)); + if (logger.isTraceEnabled()) { + logger.trace("Security Provider: " + provider.getName() + ", Version: " + provider.getVersionStr()); + provider.entrySet().forEach(entry -> { logger.trace("\t" + entry.getKey()); }); + } + } + + final List requiredProviders = XPackSettings.FIPS_REQUIRED_PROVIDERS.get(settings); + logger.info("JVM Security Providers: " + foundProviders); + if (requiredProviders != null && requiredProviders.isEmpty() == false) { + List unsatisfiedProviders = requiredProviders.stream() + .map(s -> s.toLowerCase(Locale.ROOT)) + .filter(element -> foundProviders.contains(element) == false) + .toList(); + + if (unsatisfiedProviders.isEmpty() == false) { + String errorMessage = "Could not find required FIPS security provider: " + unsatisfiedProviders; + logger.error(errorMessage); + validationErrors.add(errorMessage); + } + } + if (validationErrors.isEmpty() == false) { final StringBuilder sb = new StringBuilder(); sb.append("Validation for FIPS 140 mode failed: \n"); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 8a0a9c09b7d1a..e4e9bc453ee83 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.replication.TransportReplicationAction.ConcreteShardRequest; import org.elasticsearch.action.update.TransportUpdateAction; @@ -471,6 +472,11 @@ private void authorizeAction( } else if (isIndexAction(action)) { final Metadata metadata = clusterService.state().metadata(); final AsyncSupplier resolvedIndicesAsyncSupplier = new CachingAsyncSupplier<>(resolvedIndicesListener -> { + if (request instanceof SearchRequest searchRequest && searchRequest.pointInTimeBuilder() != null) { + var resolvedIndices = indicesAndAliasesResolver.resolvePITIndices(searchRequest); + resolvedIndicesListener.onResponse(resolvedIndices); + return; + } final ResolvedIndices resolvedIndices = 
IndicesAndAliasesResolver.tryResolveWithoutWildcards(action, request); if (resolvedIndices != null) { resolvedIndicesListener.onResponse(resolvedIndices); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 16258e71e85b8..a4163b6f10fc0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -11,6 +11,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.action.search.SearchContextId; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -176,6 +178,24 @@ static ResolvedIndices resolveIndicesAndAliasesWithoutWildcards(String action, I return new ResolvedIndices(localIndices, List.of()); } + /** + * Returns the resolved indices from the {@link SearchContextId} within the provided {@link SearchRequest}. + */ + ResolvedIndices resolvePITIndices(SearchRequest request) { + assert request.pointInTimeBuilder() != null; + var indices = SearchContextId.decodeIndices(request.pointInTimeBuilder().getEncodedId()); + final ResolvedIndices split; + if (request.allowsRemoteIndices()) { + split = remoteClusterResolver.splitLocalAndRemoteIndexNames(indices); + } else { + split = new ResolvedIndices(Arrays.asList(indices), Collections.emptyList()); + } + if (split.isEmpty()) { + return new ResolvedIndices(List.of(NO_INDEX_PLACEHOLDER), Collections.emptyList()); + } + return split; + } + private static void throwOnUnexpectedWildcards(String action, String[] indices) { final List wildcards = Stream.of(indices).filter(Regex::isSimpleMatchPattern).toList(); assert wildcards.isEmpty() == false : "we already know that there's at least one wildcard in the indices"; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index ed5ed53894b6c..18929c70cbe7d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -582,6 +582,32 @@ public void testValidateForFipsInvalidPasswordHashingAlgorithm() { assertThat(iae.getMessage(), containsString("Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM.")); } + public void testValidateForFipsRequiredProvider() { + final Settings settings = Settings.builder() + .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) + .putList(XPackSettings.FIPS_REQUIRED_PROVIDERS.getKey(), List.of("BCFIPS")) + .build(); + if (inFipsJvm()) { + Security.validateForFips(settings); + // no exceptions since gradle has wired in the bouncy castle FIPS provider + } else { + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Security.validateForFips(settings)); + assertThat(iae.getMessage(), containsString("Could not find required FIPS security 
provider: [bcfips]")); + } + + final Settings settings2 = Settings.builder() + .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) + .putList(XPackSettings.FIPS_REQUIRED_PROVIDERS.getKey(), List.of("junk0", "BCFIPS", "junk1", "junk2")) + .build(); + if (inFipsJvm()) { + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Security.validateForFips(settings2)); + assertThat(iae.getMessage(), containsString("Could not find required FIPS security provider: [junk0, junk1, junk2]")); + } else { + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Security.validateForFips(settings2)); + assertThat(iae.getMessage(), containsString("Could not find required FIPS security provider: [junk0, bcfips, junk1, junk2]")); + } + } + public void testValidateForFipsMultipleValidationErrors() { final Settings settings = Settings.builder() .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 4cabe5a8ec3ba..ad05cb20ffbbf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authz; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.DocWriteRequest; @@ -61,6 +62,7 @@ import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.ParsedScrollId; +import org.elasticsearch.action.search.SearchContextId; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.SearchTransportService; @@ -101,6 +103,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.bulk.stats.BulkOperationListener; @@ -110,7 +113,12 @@ import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.builder.PointInTimeBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -1229,6 +1237,72 @@ public void testSearchAgainstIndex() { verifyNoMoreInteractions(auditTrail); } + public void testSearchPITAgainstIndex() { + RoleDescriptor role = new RoleDescriptor( + "search_index", + null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("index-*").privileges("read").build() }, + null + ); + 
roleMap.put(role.getName(), role); + final Authentication authentication = createAuthentication(new User("test search user", role.getName())); + + final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); + final String indexName = "index-" + randomAlphaOfLengthBetween(1, 5); + + final ClusterState clusterState = mockMetadataWithIndex(indexName); + final IndexMetadata indexMetadata = clusterState.metadata().index(indexName); + + PointInTimeBuilder pit = new PointInTimeBuilder(createEncodedPIT(indexMetadata.getIndex())); + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder().pointInTimeBuilder(pit)) + .allowPartialSearchResults(false); + final ShardSearchRequest shardRequest = new ShardSearchRequest( + new OriginalIndices(new String[] { indexName }, searchRequest.indicesOptions()), + searchRequest, + new ShardId(indexMetadata.getIndex(), 0), + 0, + 1, + AliasFilter.EMPTY, + 1.0f, + System.currentTimeMillis(), + null + ); + this.setFakeOriginatingAction = false; + authorize(authentication, TransportSearchAction.TYPE.name(), searchRequest, true, () -> { + verify(rolesStore).getRoles(Mockito.same(authentication), Mockito.any()); + IndicesAccessControl iac = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); + // Successful search action authorization should set a parent authorization header. + assertThat(securityContext.getParentAuthorization().action(), equalTo(TransportSearchAction.TYPE.name())); + // Within the action handler, execute a child action (the query phase of search) + authorize(authentication, SearchTransportService.QUERY_ACTION_NAME, shardRequest, false, () -> { + // This child action triggers a second interaction with the role store (which is cached) + verify(rolesStore, times(2)).getRoles(Mockito.same(authentication), Mockito.any()); + // But it does not create a new IndicesAccessControl + assertThat(threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY), sameInstance(iac)); + // The parent authorization header should only be present for direct child actions + // and should not be carried over for children of child actions. + // Meaning, only the query phase action should be pre-authorized in this case; potential sub-actions should not be.
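Since the SearchRequest above deliberately names no indices, the index identity has to travel inside the point-in-time id itself. A minimal sketch of that round trip, assuming createEncodedPIT is the test helper added further down in this class and decodeIndices is the same call used by IndicesAndAliasesResolver.resolvePITIndices earlier in this diff:

// Illustrative only: encode an index into a PIT id, then recover it the way
// the authorization path does, without the request ever naming the index.
String encodedPit = createEncodedPIT(indexMetadata.getIndex());
String[] pitIndices = SearchContextId.decodeIndices(encodedPit);
// pitIndices now holds the index name, even though searchRequest.indices() is empty.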
+ assertThat(securityContext.getParentAuthorization(), nullValue()); + }); + }); + assertThat(searchRequest.indices().length, equalTo(0)); + verify(auditTrail).accessGranted( + eq(requestId), + eq(authentication), + eq(TransportSearchAction.TYPE.name()), + eq(searchRequest), + authzInfoRoles(new String[] { role.getName() }) + ); + verify(auditTrail).accessGranted( + eq(requestId), + eq(authentication), + eq(SearchTransportService.QUERY_ACTION_NAME), + eq(shardRequest), + authzInfoRoles(new String[] { role.getName() }) + ); + verifyNoMoreInteractions(auditTrail); + } + public void testScrollRelatedRequestsAllowed() { RoleDescriptor role = new RoleDescriptor( "a_all", @@ -3545,6 +3619,26 @@ static AuthorizationInfo authzInfoRoles(String[] expectedRoles) { return ArgumentMatchers.argThat(new RBACAuthorizationInfoRoleMatcher(expectedRoles)); } + private static class TestSearchPhaseResult extends SearchPhaseResult { + final DiscoveryNode node; + + TestSearchPhaseResult(ShardSearchContextId contextId, DiscoveryNode node) { + this.contextId = contextId; + this.node = node; + } + } + + private static String createEncodedPIT(Index index) { + DiscoveryNode node1 = DiscoveryNodeUtils.create("node_1"); + TestSearchPhaseResult testSearchPhaseResult1 = new TestSearchPhaseResult(new ShardSearchContextId("a", 1), node1); + testSearchPhaseResult1.setSearchShardTarget( + new SearchShardTarget("node_1", new ShardId(index.getName(), index.getUUID(), 0), null) + ); + List results = new ArrayList<>(); + results.add(testSearchPhaseResult1); + return SearchContextId.encode(results, Collections.emptyMap(), TransportVersion.current()); + } + private static class RBACAuthorizationInfoRoleMatcher implements ArgumentMatcher { private final String[] wanted; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java index 8364ad3d4c027..7c5fa5053222b 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java @@ -25,16 +25,18 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.xpack.vectortile.SpatialGeometryFormatterExtension; import org.elasticsearch.xpack.vectortile.feature.FeatureFactory; -import org.hamcrest.Matchers; import java.io.IOException; import java.nio.ByteOrder; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + public class GeoShapeWithDocValuesFieldTypeTests extends FieldTypeTestCase { - public void testFetchSourceValue() throws IOException { + public void testFetchSourceValue() throws Exception { final GeoFormatterFactory geoFormatterFactory = new GeoFormatterFactory<>( new SpatialGeometryFormatterExtension().getGeometryFormatterFactories() ); @@ -53,26 +55,43 @@ public void testFetchSourceValue() throws IOException { String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; String wktPoint = "POINT (14.0 15.0)"; String wktMalformed = "POINT foo"; + byte[] wkbLine = WellKnownBinary.toWKB( + WellKnownText.fromWKT(StandardValidator.NOOP, false, wktLineString), + ByteOrder.LITTLE_ENDIAN + ); + byte[] wkbPoint = WellKnownBinary.toWKB(WellKnownText.fromWKT(StandardValidator.NOOP, 
false, wktPoint), ByteOrder.LITTLE_ENDIAN); // Test a single shape in geojson format. Object sourceValue = jsonLineString; assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt")); + List wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbLine)); // Test a malformed single shape in geojson format sourceValue = jsonMalformed; assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt")); + assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb")); // Test a list of shapes in geojson format. sourceValue = List.of(jsonLineString, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a list of shapes including one malformed in geojson format sourceValue = List.of(jsonLineString, jsonMalformed, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a single shape in wkt format. sourceValue = wktLineString; @@ -109,26 +128,31 @@ public void testFetchStoredValue() throws IOException { geoFormatterFactory ).setStored(true).build(MapperBuilderContext.root(randomBoolean(), false)).fieldType(); - ByteOrder byteOrder = randomBoolean() ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN; - Map jsonLineString = Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; String wktPoint = "POINT (14.0 15.0)"; BytesRef wkbLineString = new BytesRef( - WellKnownBinary.toWKB(new Line(new double[] { 42.0, 30.0 }, new double[] { 27.1, 50.0 }), byteOrder) + WellKnownBinary.toWKB(new Line(new double[] { 42.0, 30.0 }, new double[] { 27.1, 50.0 }), ByteOrder.LITTLE_ENDIAN) ); - BytesRef wkbPoint = new BytesRef(WellKnownBinary.toWKB(new Point(14.0, 15.0), byteOrder)); + BytesRef wkbPoint = new BytesRef(WellKnownBinary.toWKB(new Point(14.0, 15.0), ByteOrder.LITTLE_ENDIAN)); // Test a single shape in wkb format. List storedValues = List.of(wkbLineString); assertEquals(List.of(jsonLineString), fetchStoredValue(mapper, storedValues, null)); assertEquals(List.of(wktLineString), fetchStoredValue(mapper, storedValues, "wkt")); + List wkb = fetchStoredValue(mapper, storedValues, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbLineString.bytes)); // Test a list of shapes in wkb format. 
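Note that the expected bytes in these hunks are now built with an explicit ByteOrder.LITTLE_ENDIAN (the previously randomized byteOrder is removed above), presumably because the "wkb" fetch format emits little-endian WKB and a random byte order would make the comparisons flaky. A self-contained sketch of how such an expected value is produced:

import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.utils.WellKnownBinary;
import java.nio.ByteOrder;

class WkbSketch {
    // Expected little-endian WKB for "POINT (14.0 15.0)", matching how the assertions here build it.
    static byte[] expectedPointWkb() {
        return WellKnownBinary.toWKB(new Point(14.0, 15.0), ByteOrder.LITTLE_ENDIAN);
    }
}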
storedValues = List.of(wkbLineString, wkbPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchStoredValue(mapper, storedValues, null)); assertEquals(List.of(wktLineString, wktPoint), fetchStoredValue(mapper, storedValues, "wkt")); + wkb = fetchStoredValue(mapper, storedValues, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLineString.bytes)); + assertThat(wkb.get(1), equalTo(wkbPoint.bytes)); } public void testFetchVectorTile() throws IOException { @@ -180,9 +204,9 @@ private void fetchVectorTile(Geometry geometry) throws IOException { // happen that the geometry is out of range (close to the poles). features = List.of(); } - assertThat(features.size(), Matchers.equalTo(sourceValue.size())); + assertThat(features.size(), equalTo(sourceValue.size())); for (int i = 0; i < features.size(); i++) { - assertThat(sourceValue.get(i), Matchers.equalTo(features.get(i))); + assertThat(sourceValue.get(i), equalTo(features.get(i))); } } @@ -308,10 +332,10 @@ private void assertFetchSourceMVT(Object sourceValue, String mvtEquivalentAsWKT) final int extent = randomIntBetween(256, 4096); List mvtExpected = fetchSourceValue(mapper, mvtEquivalentAsWKT, "mvt(0/0/0@" + extent + ")"); List mvt = fetchSourceValue(mapper, sourceValue, "mvt(0/0/0@" + extent + ")"); - assertThat(mvt.size(), Matchers.equalTo(1)); - assertThat(mvt.size(), Matchers.equalTo(mvtExpected.size())); - assertThat(mvtExpected.get(0), Matchers.instanceOf(byte[].class)); - assertThat(mvt.get(0), Matchers.instanceOf(byte[].class)); - assertThat((byte[]) mvt.get(0), Matchers.equalTo((byte[]) mvtExpected.get(0))); + assertThat(mvt.size(), equalTo(1)); + assertThat(mvt.size(), equalTo(mvtExpected.size())); + assertThat(mvtExpected.get(0), instanceOf(byte[].class)); + assertThat(mvt.get(0), instanceOf(byte[].class)); + assertThat((byte[]) mvt.get(0), equalTo((byte[]) mvtExpected.get(0))); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java index ed902b0f8cfe1..6524860e9438c 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java @@ -7,14 +7,19 @@ package org.elasticsearch.xpack.spatial.index.mapper; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import java.io.IOException; +import java.nio.ByteOrder; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class PointFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { @@ -24,26 +29,39 @@ public void testFetchSourceValue() throws IOException { String wktPoint = "POINT (42.0 27.1)"; Map otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0)); String otherWktPoint = "POINT (30.0 50.0)"; + byte[] wkbPoint = WellKnownBinary.toWKB(new Point(42.0, 27.1), ByteOrder.LITTLE_ENDIAN); + byte[] otherWkbPoint = WellKnownBinary.toWKB(new Point(30.0, 50.0), ByteOrder.LITTLE_ENDIAN); // Test a single point in [x, y] array format. 
Object sourceValue = List.of(42.0, 27.1); assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + List wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); // Test a single point in "x, y" string format. sourceValue = "42.0,27.1"; assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); // Test a malformed single point sourceValue = "foo"; assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt")); + assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb")); // Test a list of points in [x, y] array format. sourceValue = List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)); assertEquals(List.of(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbPoint)); + assertThat(wkb.get(1), equalTo(otherWkbPoint)); // Test a single point in well-known text format. sourceValue = "POINT (42.0 27.1)"; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java index 1050c9acef11a..c7d87a6c6e8f5 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java @@ -7,18 +7,23 @@ package org.elasticsearch.xpack.spatial.index.mapper; +import org.elasticsearch.geometry.utils.StandardValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; -import java.io.IOException; +import java.nio.ByteOrder; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class ShapeFieldTypeTests extends FieldTypeTestCase { - public void testFetchSourceValue() throws IOException { + public void testFetchSourceValue() throws Exception { MappedFieldType mapper = new ShapeFieldMapper.Builder("field", IndexVersion.current(), false, true).build( MapperBuilderContext.root(false, false) ).fieldType(); @@ -29,26 +34,43 @@ public void testFetchSourceValue() throws IOException { String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; String wktPoint = "POINT (14.3 15.0)"; String wktMalformed = "POINT foo"; + byte[] wkbLine = WellKnownBinary.toWKB( + WellKnownText.fromWKT(StandardValidator.NOOP, false, wktLineString), + ByteOrder.LITTLE_ENDIAN + ); + byte[] wkbPoint = WellKnownBinary.toWKB(WellKnownText.fromWKT(StandardValidator.NOOP, false, wktPoint), ByteOrder.LITTLE_ENDIAN); // Test a single shape in geojson format. 
Object sourceValue = jsonLineString; assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt")); + List wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbLine)); // Test a malformed single shape in geojson format sourceValue = jsonMalformed; assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt")); + assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb")); // Test a list of shapes in geojson format. sourceValue = List.of(jsonLineString, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a list of shapes including one malformed in geojson format sourceValue = List.of(jsonLineString, jsonMalformed, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a single shape in wkt format. sourceValue = wktLineString; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index a0da67f3006a3..936f4aa23cd57 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -161,6 +161,7 @@ private void searchWithPointInTime(SearchRequest search, ActionListener { String pitId = openPointInTimeResponse.getPointInTimeId(); + search.indicesOptions(SearchRequest.DEFAULT_INDICES_OPTIONS); search.indices(Strings.EMPTY_ARRAY); search.source().pointInTimeBuilder(new PointInTimeBuilder(pitId)); ActionListener closePitOnErrorListener = wrap(searchResponse -> { @@ -201,13 +202,14 @@ public static SearchRequest prepareRequest(SearchSourceBuilder source, SqlConfig source.timeout(cfg.requestTimeout()); SearchRequest searchRequest = new SearchRequest(INTRODUCING_UNSIGNED_LONG); - searchRequest.indices(indices); + if (source.pointInTimeBuilder() == null) { + searchRequest.indices(indices); + searchRequest.indicesOptions( + includeFrozen ? IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS + ); + } searchRequest.source(source); searchRequest.allowPartialSearchResults(cfg.allowPartialSearchResults()); - searchRequest.indicesOptions( - includeFrozen ? 
IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS - ); - return searchRequest; } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml index d0f7c7636582f..4a0d6387683ac 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml @@ -29,6 +29,16 @@ setup: - do: indices.refresh: { } +--- +"Counted keyword is searchable by default": + - do: + field_caps: + index: test-events + fields: [ events ] + + - match: { fields.events.counted_keyword.searchable: true } + - match: { fields.events.counted_keyword.aggregatable: true } + --- "Counted Terms agg": diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/20_no_index.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/20_no_index.yml new file mode 100644 index 0000000000000..1fe48207b5586 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/20_no_index.yml @@ -0,0 +1,54 @@ +setup: + + - skip: + version: " - 8.12.99" + reason: "index option on counted_keyword was added in 8.13" + + - do: + indices.create: + index: test-events-no-index + body: + mappings: + properties: + events: + type: counted_keyword + index: false + + - do: + index: + index: test-events-no-index + id: "1" + body: { "events": [ "a", "a", "b" ] } + + + - do: + indices.refresh: { } + +--- +"Counted keyword with index false is not searchable": + - do: + field_caps: + index: test-events-no-index + fields: [ events ] + + - match: { fields.events.counted_keyword.searchable: false } + - match: { fields.events.counted_keyword.aggregatable: true } + +--- +"Counted Terms agg only relies on doc values": +# although the field is not indexed, the counted_terms agg should still work + - do: + search: + index: test-events-no-index + body: + size: 0 + aggs: + event_terms: + counted_terms: + field: events + + - match: { aggregations.event_terms.buckets.0.key: "a" } + - match: { aggregations.event_terms.buckets.0.doc_count: 2 } + - match: { aggregations.event_terms.buckets.1.key: "b" } + - match: { aggregations.event_terms.buckets.1.doc_count: 1 } + - length: { aggregations.event_terms.buckets: 2 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index 102885af53ad7..6cbc9a225588b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -1,10 +1,8 @@ --- "Coalesce and to_ip functions": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102871" - # version: " - 8.11.99" - # reason: "fixes in 8.12 or later" + version: " - 8.11.99" + reason: "fixes in 8.12 or later" features: warnings - do: bulk: @@ -129,10 +127,10 @@ --- -"null MappedFieldType on single value detection (https://github.com/elastic/elasticsearch/issues/103141)": +"null MappedFieldType on single value detection #103141": - skip: - version: all - reason: "AwaitsFix fix https://github.com/elastic/elasticsearch/issues/103561" + version: " - 8.12.99" + reason: "fixes in 8.13 or later" - do: indices.create: index: npe_single_value_1 
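One pattern recurs across this diff: once a search carries a point-in-time, the request must stop naming indices itself, because the target indices (and their options) are already fixed by the PIT id. The SQL Querier change above and the ClientTransformIndexer change further down both apply it; a condensed, illustrative sketch using the calls shown in those hunks (it assumes the request already has a source attached):

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.search.builder.PointInTimeBuilder;

final class PitRequestSketch {
    // Attach a PIT id and clear request-level index routing; the PIT already
    // pins the indices, so leaving them on the request only invites conflicts.
    static SearchRequest attachPit(SearchRequest search, String pitId) {
        search.indicesOptions(SearchRequest.DEFAULT_INDICES_OPTIONS);
        search.indices(Strings.EMPTY_ARRAY);
        search.source().pointInTimeBuilder(new PointInTimeBuilder(pitId));
        return search;
    }
}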
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml index 10de6e2c22d9e..1df34a64f860a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml @@ -2,7 +2,7 @@ "Test valid job config": - do: ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", @@ -21,7 +21,7 @@ - do: catch: /.data_description. failed to parse field .format./ ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", @@ -38,7 +38,7 @@ "Test valid job config with job ID": - do: ml.validate: - body: > + body: > { "job_id": "validate-job-config-with-job-id", "analysis_config": { @@ -58,7 +58,7 @@ - do: catch: /Invalid job_id; '_' can contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/ ml.validate: - body: > + body: > { "job_id": "_", "analysis_config": { @@ -78,7 +78,7 @@ - do: catch: /illegal_argument_exception/ ml.validate: - body: > + body: > { "model_snapshot_id": "wont-create-with-this-setting", "analysis_config" : { @@ -92,7 +92,7 @@ - do: catch: /The job is configured with fields \[model_snapshot_id\] that are illegal to set at job creation/ ml.validate: - body: > + body: > { "model_snapshot_id": "wont-create-with-this-setting", "analysis_config" : { @@ -109,7 +109,7 @@ - do: catch: /illegal_argument_exception.*Duplicate detectors are not allowed/ ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", @@ -126,7 +126,7 @@ - do: catch: /illegal_argument_exception.*Duplicate detectors are not allowed/ ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml index e10852b4c93d6..2d3cfa764e98c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml @@ -240,21 +240,6 @@ setup: "deduce_mappings": false } } - - match: { preview.0.airline: foo } - - match: { preview.0.by-hour: "2017-02-18T00:00:00.000Z" } - - match: { preview.0.avg_response: 1.0 } - - match: { preview.0.time.max: "2017-02-18T00:30:00.000Z" } - - match: { preview.0.time.min: "2017-02-18T00:00:00.000Z" } - - match: { preview.1.airline: bar } - - match: { preview.1.by-hour: "2017-02-18T01:00:00.000Z" } - - match: { preview.1.avg_response: 42.0 } - - match: { preview.1.time.max: "2017-02-18T01:00:00.000Z" } - - match: { preview.1.time.min: "2017-02-18T01:00:00.000Z" } - - match: { preview.2.airline: foo } - - match: { preview.2.by-hour: "2017-02-18T01:00:00.000Z" } - - match: { preview.2.avg_response: 42.0 } - - match: { preview.2.time.max: "2017-02-18T01:01:00.000Z" } - - match: { preview.2.time.min: "2017-02-18T01:01:00.000Z" } - match: { generated_dest_index.mappings.properties: {} } --- diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index ef34db62e5e03..8f129789d46b7 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -2,6 
+2,7 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -53,16 +54,29 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' + maybeDisableForFips(it) } tasks.register('mixed-cluster', RestIntegTestTask) { dependsOn 'remote-cluster' useCluster remoteCluster systemProperty 'tests.rest.suite', 'multi_cluster' + maybeDisableForFips(it) } tasks.register("integTest") { dependsOn 'mixed-cluster' + maybeDisableForFips(it) } tasks.named("check").configure { dependsOn("integTest") } + +//TODO: remove with version 8.14. A new FIPS setting was added in 8.13. Since FIPS configures all test clusters and this specific integTest uses +// the previous minor version, that setting is not available when running in FIPS until 8.14. +def maybeDisableForFips(task) { + if (BuildParams.inFipsJvm) { + if(Version.fromString(project.version).before(Version.fromString('8.14.0'))) { + task.enabled = false + } + } +} diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java index 00b247321b132..49ee0f8bbd9a9 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDestIndexIT.java @@ -174,7 +174,7 @@ private void testTransformDestIndexMappings(String transformId, boolean deduceMa } } }""", destIndex); - Request createIndexTemplateRequest = new Request("PUT", "_template/test_dest_index_mappings_template"); + Request createIndexTemplateRequest = new Request("PUT", "_template/test_dest_index_no_deduce_template"); createIndexTemplateRequest.setJsonEntity(destIndexTemplate); createIndexTemplateRequest.setOptions(expectWarnings(RestPutIndexTemplateAction.DEPRECATION_WARNING)); Map createIndexTemplateResponse = entityAsMap(client().performRequest(createIndexTemplateRequest)); @@ -253,9 +253,6 @@ private void testTransformDestIndexMappings(String transformId, boolean deduceMa ) ) ); - Map searchResult = getAsMap(destIndex + "/_search?q=reviewer:user_0"); - String timestamp = (String) ((List) XContentMapValues.extractValue("hits.hits._source.timestamp", searchResult)).get(0); - assertThat(timestamp, is(equalTo("2017-01-10T10:10:10.000Z"))); } private static void assertAliases(String index, String... 
aliases) throws IOException { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java index 1c98b8b34d0d1..925e6d5381770 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java @@ -1383,11 +1383,8 @@ private List previewWithOffset(String offset) throws IOException { } } } - }, - "settings": { - "deduce_mappings": %s } - }""", REVIEWS_INDEX_NAME, offset, randomBoolean()); + }""", REVIEWS_INDEX_NAME, offset); createPreviewRequest.setJsonEntity(config); Map previewTransformResponse = entityAsMap(client().performRequest(createPreviewRequest)); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java index 935ff04c47d85..98777b47543cb 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransformUpdater.java @@ -299,7 +299,7 @@ private static void updateTransformConfiguration( TransformAuditor auditor, IndexNameExpressionResolver indexNameExpressionResolver, TransformConfig config, - Map destIndexMappings, + Map mappings, SeqNoPrimaryTermAndIndex seqNoPrimaryTermAndIndex, ClusterState clusterState, Settings destIndexSettings, @@ -355,7 +355,7 @@ private static void updateTransformConfiguration( clusterState, config, destIndexSettings, - destIndexMappings, + mappings, createDestinationListener ); } else { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index 4eded1aa0b5a6..8e0a935ffaa53 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java @@ -47,7 +47,6 @@ import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.DestAlias; -import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; import org.elasticsearch.xpack.core.transform.transforms.SyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; @@ -66,7 +65,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static java.util.Collections.emptyMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.DUMMY_DEST_INDEX_FOR_PREVIEW; import static org.elasticsearch.xpack.transform.utils.SecondaryAuthorizationUtils.getSecurityHeadersPreferringSecondary; @@ -155,7 +153,6 @@ protected void 
doExecute(Task task, Request request, ActionListener li config.getDestination().getIndex(), config.getDestination().getAliases(), config.getSyncConfig(), - config.getSettings(), listener ), listener::onFailure @@ -211,7 +208,6 @@ private void getPreview( String dest, List aliases, SyncConfig syncConfig, - SettingsConfig settingsConfig, ActionListener listener ) { Client parentTaskClient = new ParentTaskAssigningClient(client, parentTaskId); @@ -289,17 +285,12 @@ private void getPreview( }, listener::onFailure); ActionListener> deduceMappingsListener = ActionListener.wrap(deducedMappings -> { - if (Boolean.FALSE.equals(settingsConfig.getDeduceMappings())) { - mappings.set(emptyMap()); - } else { - mappings.set(deducedMappings); - } + mappings.set(deducedMappings); function.preview( parentTaskClient, timeout, filteredHeaders, source, - // Use deduced mappings for generating preview even if "settings.deduce_mappings" is set to false deducedMappings, NUMBER_OF_PREVIEW_BUCKETS, previewListener diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index 2c9fc8ffce5bf..4c86aed335ac1 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -109,7 +109,7 @@ protected void masterOperation(Task task, Request request, ClusterState clusterS // <3> Create the transform ActionListener validateTransformListener = ActionListener.wrap( - unusedValidationResponse -> putTransform(request, listener), + validationResponse -> putTransform(request, listener), listener::onFailure ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java index fe3d4ede898bc..a08612fa4be72 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java @@ -40,7 +40,6 @@ import java.util.Map; import java.util.Set; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toMap; @@ -139,7 +138,7 @@ public static void createDestinationIndex( if (dest.length == 0) { TransformDestIndexSettings generatedDestIndexSettings = createTransformDestIndexSettings( destIndexSettings, - Boolean.FALSE.equals(config.getSettings().getDeduceMappings()) ? 
emptyMap() : destIndexMappings, + destIndexMappings, config.getId(), Clock.systemUTC() ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index 36cd79ba87616..1b8d14c6cdc2f 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -288,13 +288,13 @@ void doGetFieldMappings(ActionListener> fieldMappingsListene SchemaUtil.getDestinationFieldMappings(client, getConfig().getDestination().getIndex(), fieldMappingsListener); } - void validate(ActionListener listener) { + void validate(ActionListener listener) { ClientHelper.executeAsyncWithOrigin( client, ClientHelper.TRANSFORM_ORIGIN, ValidateTransformAction.INSTANCE, new ValidateTransformAction.Request(transformConfig, false, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT), - listener + ActionListener.wrap(response -> listener.onResponse(null), listener::onFailure) ); } @@ -519,16 +519,23 @@ private void injectPointInTimeIfNeeded( void doSearch(Tuple namedSearchRequest, ActionListener listener) { String name = namedSearchRequest.v1(); - SearchRequest searchRequest = namedSearchRequest.v2(); + SearchRequest originalRequest = namedSearchRequest.v2(); // We want to treat a request to search 0 indices as a request to do nothing, not a request to search all indices - if (searchRequest.indices().length == 0) { - logger.debug("[{}] Search request [{}] optimized to noop; searchRequest [{}]", getJobId(), name, searchRequest); + if (originalRequest.indices().length == 0) { + logger.debug("[{}] Search request [{}] optimized to noop; searchRequest [{}]", getJobId(), name, originalRequest); listener.onResponse(null); return; } - logger.trace("searchRequest: [{}]", searchRequest); - PointInTimeBuilder pit = searchRequest.pointInTimeBuilder(); + final SearchRequest searchRequest; + PointInTimeBuilder pit = originalRequest.pointInTimeBuilder(); + if (pit != null) { + // remove the indices from the request, they will be derived from the provided pit + searchRequest = new SearchRequest(originalRequest).indices(new String[0]).indicesOptions(SearchRequest.DEFAULT_INDICES_OPTIONS); + } else { + searchRequest = originalRequest; + } + logger.trace("searchRequest: [{}]", searchRequest); ClientHelper.executeWithHeadersAsync( transformConfig.getHeaders(), @@ -555,13 +562,13 @@ void doSearch(Tuple namedSearchRequest, ActionListener namedSearchRequest, ActionListener { @@ -177,7 +174,7 @@ public TransformIndexer( abstract void persistState(TransformState state, ActionListener listener); - abstract void validate(ActionListener listener); + abstract void validate(ActionListener listener); @Override protected String getJobId() { @@ -268,8 +265,6 @@ protected void onStart(long now, ActionListener listener) { return; } - SetOnce> deducedDestIndexMappings = new SetOnce<>(); - ActionListener finalListener = ActionListener.wrap(r -> { try { // if we haven't set the page size yet, if it is set we might have reduced it after running into an out of memory @@ -331,14 +326,8 @@ protected void onStart(long now, ActionListener listener) { } }, listener::onFailure); - ActionListener> fieldMappingsListener = ActionListener.wrap(destIndexMappings -> { - if (destIndexMappings.isEmpty() == false) { - // If we managed to fetch 
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java
@@ -177,7 +174,7 @@ public TransformIndexer(
     abstract void persistState(TransformState state, ActionListener<Void> listener);

-    abstract void validate(ActionListener<ValidateTransformAction.Response> listener);
+    abstract void validate(ActionListener<Void> listener);

     @Override
     protected String getJobId() {
@@ -268,8 +265,6 @@ protected void onStart(long now, ActionListener<Boolean> listener) {
             return;
         }

-        SetOnce<Map<String, String>> deducedDestIndexMappings = new SetOnce<>();
-
         ActionListener<Void> finalListener = ActionListener.wrap(r -> {
             try {
                 // if we haven't set the page size yet, if it is set we might have reduced it after running into an out of memory
@@ -331,14 +326,8 @@ protected void onStart(long now, ActionListener<Boolean> listener) {
             }
         }, listener::onFailure);

-        ActionListener<Map<String, String>> fieldMappingsListener = ActionListener.wrap(destIndexMappings -> {
-            if (destIndexMappings.isEmpty() == false) {
-                // If we managed to fetch destination index mappings, we use them from now on ...
-                this.fieldMappings = destIndexMappings;
-            } else {
-                // ... otherwise we fall back to index mappings deduced based on source indices
-                this.fieldMappings = deducedDestIndexMappings.get();
-            }
+        ActionListener<Map<String, String>> fieldMappingsListener = ActionListener.wrap(mappings -> {
+            this.fieldMappings = mappings;
             configurationReadyListener.onResponse(null);
         }, listener::onFailure);

@@ -349,8 +338,7 @@ protected void onStart(long now, ActionListener<Boolean> listener) {
         }, listener::onFailure);

         // If we are continuous, we will want to verify we have the latest stored configuration
-        ActionListener<ValidateTransformAction.Response> changedSourceListener = ActionListener.wrap(validationResponse -> {
-            deducedDestIndexMappings.set(validationResponse.getDestIndexMappings());
+        ActionListener<Void> changedSourceListener = ActionListener.wrap(r -> {
             if (isContinuous()) {
                 transformsConfigManager.getTransformConfiguration(getJobId(), ActionListener.wrap(config -> {
                     if (transformConfig.equals(config) && fieldMappings != null) {
@@ -389,7 +377,7 @@ protected void onStart(long now, ActionListener<Boolean> listener) {
             if (hasChanged) {
                 context.setChangesLastDetectedAt(instantOfTrigger);
                 logger.debug("[{}] source has changed, triggering new indexer run.", getJobId());
-                changedSourceListener.onResponse(new ValidateTransformAction.Response(emptyMap()));
+                changedSourceListener.onResponse(null);
             } else {
                 logger.trace("[{}] source has not changed, finish indexer early.", getJobId());
                 // No changes, stop executing
@@ -408,7 +396,7 @@ protected void onStart(long now, ActionListener<Boolean> listener) {
                 hasSourceChanged = true;
                 context.setLastSearchTime(instantOfTrigger);
                 context.setChangesLastDetectedAt(instantOfTrigger);
-                changedSourceListener.onResponse(new ValidateTransformAction.Response(emptyMap()));
+                changedSourceListener.onResponse(null);
             }
         }
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java
index d403799a62a0f..ec3b5d28fb561 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java
@@ -43,6 +43,7 @@
 import java.util.Set;
 import java.util.stream.Stream;

+import static java.util.Collections.emptyMap;
 import static java.util.stream.Collectors.toList;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;

@@ -92,6 +93,10 @@ public void deduceMappings(
         SourceConfig sourceConfig,
         final ActionListener<Map<String, String>> listener
     ) {
+        if (Boolean.FALSE.equals(settings.getDeduceMappings())) {
+            listener.onResponse(emptyMap());
+            return;
+        }
         SchemaUtil.deduceMappings(
             client,
             headers,
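Boolean.FALSE.equals(settings.getDeduceMappings()) shows up in several of these hunks because the transform settings expose nullable Boolean values, where null means "not set, use the default". The early return added to Pivot.deduceMappings therefore only fires on an explicit opt-out. A small, self-contained illustration of the null-safe check (plain Java, no transform classes involved):

class TriStateBooleanSketch {
    public static void main(String[] args) {
        Boolean unset = null;            // setting absent: mappings keep being deduced (the default)
        Boolean optOut = Boolean.FALSE;  // explicit "deduce_mappings": false

        System.out.println(Boolean.FALSE.equals(unset));  // false -> deduction runs
        System.out.println(Boolean.FALSE.equals(optOut)); // true  -> deduceMappings() returns emptyMap()
    }
}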
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java
index c8090ee301c38..b1c9edc0fab0a 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java
@@ -59,7 +59,6 @@
 import java.time.Clock;
 import java.time.Instant;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
@@ -420,8 +419,9 @@ public void testHandlePitIndexNotFound() throws InterruptedException {
         try (var threadPool = createThreadPool()) {
             final var client = new PitMockClient(threadPool, true);
             ClientTransformIndexer indexer = createTestIndexer(new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")));
-            SearchRequest searchRequest = new SearchRequest("deleted-index");
-            searchRequest.source().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id"));
+            SearchRequest searchRequest = new SearchRequest("deleted-index").source(
+                new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id_on_deleted_index"))
+            );
             Tuple<String, SearchRequest> namedSearchRequest = new Tuple<>("test-handle-pit-index-not-found", searchRequest);
             this.assertAsync(listener -> indexer.doSearch(namedSearchRequest, listener), response -> {
                 // if the pit got deleted, we know it retried
@@ -433,8 +433,9 @@ public void testHandlePitIndexNotFound() throws InterruptedException {
         try (var threadPool = createThreadPool()) {
             final var client = new PitMockClient(threadPool, true);
             ClientTransformIndexer indexer = createTestIndexer(new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")));
-            SearchRequest searchRequest = new SearchRequest("essential-deleted-index");
-            searchRequest.source().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id"));
+            SearchRequest searchRequest = new SearchRequest("essential-deleted-index").source(
+                new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id_essential-deleted-index"))
+            );
             Tuple<String, SearchRequest> namedSearchRequest = new Tuple<>("test-handle-pit-index-not-found", searchRequest);
             indexer.doSearch(namedSearchRequest, ActionListener.wrap(r -> fail("expected a failure, got response"), e -> {
                 assertTrue(e instanceof IndexNotFoundException);
@@ -521,14 +522,16 @@ protected void
                 listener.onResponse((Response) response);
                 return;
             } else if (request instanceof SearchRequest searchRequest) {
-                // if pit is used and deleted-index is given throw index not found
-                if (searchRequest.pointInTimeBuilder() != null && Arrays.binarySearch(searchRequest.indices(), "deleted-index") >= 0) {
+                if (searchRequest.pointInTimeBuilder() != null
+                    && searchRequest.pointInTimeBuilder().getEncodedId().equals("the_pit_id_on_deleted_index")) {
                     listener.onFailure(new IndexNotFoundException("deleted-index"));
                     return;
                 }
-                if (Arrays.binarySearch(searchRequest.indices(), "essential-deleted-index") >= 0) {
+                if ((searchRequest.pointInTimeBuilder() != null
+                    && searchRequest.pointInTimeBuilder().getEncodedId().equals("the_pit_id_essential-deleted-index"))
+                    || (searchRequest.indices().length > 0 && searchRequest.indices()[0].equals("essential-deleted-index"))) {
                     listener.onFailure(new IndexNotFoundException("essential-deleted-index"));
                     return;
                 }
@@ -562,7 +565,6 @@ protected void
             }
             return;
         }
-        super.doExecute(action, request, listener);
     }
 }
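The mock client in these tests used to decide which searches should fail by inspecting searchRequest.indices(). Since the production change now strips index names from PIT requests, only the encoded PIT id still identifies the target, which is why the checks switched to id-based routing. A reduced sketch of the new rule (the id string is copied from the test; the wrapper method is illustrative only):

import org.elasticsearch.action.search.SearchRequest;

class PitMockRoutingSketch {
    static boolean failsAsDeletedIndex(SearchRequest request) {
        // with a PIT attached, request.indices() is empty, so match on the PIT id instead
        return request.pointInTimeBuilder() != null
            && request.pointInTimeBuilder().getEncodedId().equals("the_pit_id_on_deleted_index");
    }
}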
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java
index e65f6a0e34694..9e72a92da5bee 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.indexing.IndexerState;
 import org.elasticsearch.xpack.core.indexing.IterationResult;
-import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction;
 import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig;
 import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig;
 import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint;
@@ -249,7 +248,7 @@ void persistState(TransformState state, ActionListener<Void> listener) {
         }

         @Override
-        void validate(ActionListener<ValidateTransformAction.Response> listener) {
+        void validate(ActionListener<Void> listener) {
             listener.onResponse(null);
         }
     }
@@ -336,7 +335,7 @@ void persistState(TransformState state, ActionListener<Void> listener) {
         }

         @Override
-        void validate(ActionListener<ValidateTransformAction.Response> listener) {
+        void validate(ActionListener<Void> listener) {
             listener.onResponse(null);
         }
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java
index ee86f2ca6fcf4..372aef3d0eea7 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java
@@ -34,7 +34,6 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.indexing.IndexerState;
 import org.elasticsearch.xpack.core.indexing.IterationResult;
-import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction;
 import org.elasticsearch.xpack.core.transform.transforms.TimeRetentionPolicyConfigTests;
 import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig;
 import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint;
@@ -269,7 +268,7 @@ void persistState(TransformState state, ActionListener<Void> listener) {
         }

         @Override
-        void validate(ActionListener<ValidateTransformAction.Response> listener) {
+        void validate(ActionListener<Void> listener) {
             listener.onResponse(null);
         }
     }
diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle
index cce18a4bd1579..54b455d483b9a 100644
--- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle
+++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle
@@ -8,7 +8,7 @@ apply plugin: 'elasticsearch.rest-resources'
 restResources {
   restApi {
     include '_common', 'bulk', 'field_caps', 'security', 'search', 'clear_scroll', 'scroll', 'async_search', 'cluster',
-      'indices', 'open_point_in_time', 'close_point_in_time', 'terms_enum'
+      'indices', 'open_point_in_time', 'close_point_in_time', 'terms_enum', 'esql'
   }
 }
@@ -23,6 +23,8 @@ def fulfillingCluster = testClusters.register('fulfilling-cluster') {
   module ':modules:data-streams'
   module ':x-pack:plugin:mapper-constant-keyword'
   module ':x-pack:plugin:async-search'
+  module ':x-pack:plugin:ql'
+  module ':x-pack:plugin:esql'
   user username: "test_user", password: "x-pack-test-password"
 }
@@ -34,6 +36,8 @@ def queryingCluster = testClusters.register('querying-cluster') {
   module ':modules:data-streams'
   module ':x-pack:plugin:mapper-constant-keyword'
   module ':x-pack:plugin:async-search'
+  module ':x-pack:plugin:ql'
+  module ':x-pack:plugin:esql'
   setting 'cluster.remote.connections_per_cluster', "1"
   user username: "test_user", password: "x-pack-test-password"
diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/fulfilling_cluster/10_basic.yml b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/fulfilling_cluster/10_basic.yml
index e91a87b65c013..36002f3cde470 100644
--- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/fulfilling_cluster/10_basic.yml
+++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/fulfilling_cluster/10_basic.yml
@@ -23,7 +23,7 @@ setup:
           "indices": [
             {
               "names": ["single_doc_index", "secure_alias", "test_index", "aliased_test_index", "field_caps_index_1",
-                        "field_caps_index_3", "point_in_time_index", "simple-data-stream1", "simple-data-stream2"],
+                        "field_caps_index_3", "point_in_time_index", "simple-data-stream1", "simple-data-stream2", "esql_index"],
               "privileges": ["read", "read_cross_cluster"]
             }
           ]
@@ -46,7 +46,7 @@ setup:
           "indices": [
             {
               "names": ["single_doc_index", "secure_alias", "test_index", "aliased_test_index", "field_caps_index_1",
-                        "field_caps_index_3", "point_in_time_index", "simple-data-stream1", "simple-data-stream2"],
+                        "field_caps_index_3", "point_in_time_index", "simple-data-stream1", "simple-data-stream2", "esql_index"],
               "privileges": ["read", "read_cross_cluster"]
             }
           ]
@@ -429,3 +429,31 @@ setup:
     - '{"foo": "foo"}'
     - '{"index": {"_index": "terms_enum_index"}}'
     - '{"foo": "foobar"}'
+
+  - do:
+      indices.create:
+        index: esql_index
+        body:
+          mappings:
+            properties:
+              since:
+                type: date
+                format: "yyyy-MM-dd"
+              cost:
+                type: long
+              tag:
+                type: keyword
+  - do:
+      bulk:
+        refresh: true
+        body:
+          - '{"index": {"_index": "esql_index"}}'
+          - '{"since" : "2023-01-01", "cost": 1000, "tag": "computer"}'
+          - '{"index": {"_index": "esql_index"}}'
+          - '{ "since" : "2023-01-02", "cost": 1200, "tag": "computer"}'
+          - '{"index": {"_index": "esql_index"}}'
+          - '{"since" : "2023-01-03", "cost": 450, "tag": "tablet"}'
+          - '{"index": {"_index": "esql_index"}}'
+          - '{"since" : "2023-01-04", "cost": 100, "tag": "headphone"}'
+          - '{"index": {"_index": "esql_index"}}'
+          - '{"since" : "2023-01-05", "cost": 20, "tag": "headphone"}'
diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/100_resolve_index.yml b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/100_resolve_index.yml
index 36ea0b65f2aa5..b9dbb0a070af4 100644
--- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/100_resolve_index.yml
+++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/100_resolve_index.yml
@@ -26,23 +26,25 @@
   - match: {indices.4.name: my_remote_cluster:closed_index}
   - match: {indices.4.aliases.0: aliased_closed_index}
   - match: {indices.4.attributes.0: closed}
-  - match: {indices.5.name: my_remote_cluster:field_caps_index_1}
-  - match: {indices.5.attributes.0: open}
-  - match: {indices.6.name: my_remote_cluster:field_caps_index_3}
+  - match: {indices.5.name: my_remote_cluster:esql_index }
+  - match: {indices.5.attributes.0: open }
+  - match: {indices.6.name: my_remote_cluster:field_caps_index_1}
   - match: {indices.6.attributes.0: open}
-  - match: {indices.7.name: my_remote_cluster:point_in_time_index }
-  - match: {indices.7.attributes.0: open }
-  - match: {indices.8.name: my_remote_cluster:secured_via_alias}
-  - match: {indices.8.attributes.0: open}
-  - match: {indices.9.name: my_remote_cluster:shared_index}
+  - match: {indices.7.name: my_remote_cluster:field_caps_index_3}
+  - match: {indices.7.attributes.0: open}
+  - match: {indices.8.name: my_remote_cluster:point_in_time_index }
+  - match: {indices.8.attributes.0: open }
+  - match: {indices.9.name: my_remote_cluster:secured_via_alias}
   - match: {indices.9.attributes.0: open}
-  - match: {indices.10.name: my_remote_cluster:single_doc_index}
+  - match: {indices.10.name: my_remote_cluster:shared_index}
   - match: {indices.10.attributes.0: open}
-  - match: {indices.11.name: my_remote_cluster:terms_enum_index }
-  - match: {indices.11.attributes.0: open }
-  - match: {indices.12.name: my_remote_cluster:test_index}
-  - match: {indices.12.aliases.0: aliased_test_index}
-  - match: {indices.12.attributes.0: open}
+  - match: {indices.11.name: my_remote_cluster:single_doc_index}
+  - match: {indices.11.attributes.0: open}
+  - match: {indices.12.name: my_remote_cluster:terms_enum_index }
+  - match: {indices.12.attributes.0: open }
+  - match: {indices.13.name: my_remote_cluster:test_index}
+  - match: {indices.13.aliases.0: aliased_test_index}
+  - match: {indices.13.attributes.0: open}
   - match: {aliases.0.name: my_remote_cluster:.security}
   - match: {aliases.0.indices.0: .security-7}
   - match: {aliases.1.name: my_remote_cluster:aliased_closed_index}
diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/10_basic.yml b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/10_basic.yml
index 4a5905a11feed..cbbfbe2372f3e 100644
--- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/10_basic.yml
+++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/10_basic.yml
@@ -52,6 +52,7 @@ teardown:
       security.delete_role:
         name: "x_cluster_role"
         ignore: 404
+
 ---
 "Index data and search on the mixed cluster":

@@ -236,6 +237,9 @@ teardown:
   - match: { aggregations.cluster.buckets.0.key: "local_cluster" }
   - match: { aggregations.cluster.buckets.0.doc_count: 5 }

+  - do:
+      indices.delete:
+        index: local_index
 ---
 "Add persistent remote cluster based on the preset cluster":
   - do:
name: "remote_ccs" + body: > + { + } +--- +teardown: + - do: + security.delete_user: + username: "joe" + ignore: 404 + - do: + security.delete_role: + name: "x_cluster_role" + ignore: 404 + +--- +"Index data and search on the mixed cluster": + + - do: + indices.create: + index: esql_local + body: + mappings: + properties: + since: + type: date + format: "yyyy-MM-dd" + cost: + type: long + tag: + type: keyword + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "esql_local"}}' + - '{"since" : "2023-01-01", "cost": 750, "tag": "monitor"}' + - '{"index": {"_index": "esql_local"}}' + - '{ "since" : "2023-01-02", "cost": 2100, "tag": "laptop"}' + - '{"index": {"_index": "esql_local"}}' + - '{"since" : "2023-01-03", "cost": 250, "tag": "monitor"}' + - '{"index": {"_index": "esql_local"}}' + - '{"since" : "2023-01-04", "cost": 100, "tag": "tablet"}' + - '{"index": {"_index": "esql_local"}}' + - '{"since" : "2023-01-05", "cost": 50, "tag": "headphone"}' + + - do: + headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } + esql.query: + body: + query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT tag | LIMIT 10' + + - match: {columns.0.name: "total"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "tag"} + - match: {columns.1.type: "keyword"} + + - match: {values.0.0: 2200} + - match: {values.0.1: "computer"} + - match: {values.1.0: 170} + - match: {values.1.1: "headphone"} + - match: {values.2.0: 2100 } + - match: {values.2.1: "laptop" } + - match: {values.3.0: 1000 } + - match: {values.3.1: "monitor" } + - match: {values.4.0: 550 } + - match: {values.4.1: "tablet" } + + - do: + headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } + esql.query: + body: + query: 'FROM *:esql*,esql_* [METADATA _index] | sort cost | KEEP _index, tag, cost | LIMIT 10' + filter: + range: + since: + gte: "2023-01-02" + lte: "2023-01-03" + format: "yyyy-MM-dd" + + - match: {columns.0.name: "_index"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "tag"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "cost" } + - match: {columns.2.type: "long" } + + - match: {values.0.0: "esql_local"} + - match: {values.0.1: "monitor"} + - match: {values.0.2: 250 } + - match: {values.1.0: "my_remote_cluster:esql_index" } + - match: {values.1.1: "tablet"} + - match: {values.1.2: 450 } + - match: {values.2.0: "my_remote_cluster:esql_index" } + - match: {values.2.1: "computer" } + - match: {values.2.2: 1200 } + - match: {values.3.0: "esql_local"} + - match: {values.3.1: "laptop" } + - match: {values.3.2: 2100 } + + - do: + indices.delete: + index: esql_local diff --git a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java index c8b3b3fc3aed2..5718930f37c82 100644 --- a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java +++ b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java @@ -28,6 +28,7 @@ import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpCoreContext; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -91,6 +92,7 @@ /** * 
diff --git a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java
index c8b3b3fc3aed2..5718930f37c82 100644
--- a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java
+++ b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java
@@ -28,6 +28,7 @@
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.protocol.HttpCoreContext;
 import org.apache.http.util.EntityUtils;
+import org.apache.lucene.tests.util.LuceneTestCase;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
@@ -91,6 +92,7 @@
 /**
  * An integration test for validating SAML authentication against a real Identity Provider (Shibboleth)
  */
+@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103717")
 @ThreadLeakFilters(filters = { TestContainersThreadFilter.class })
 public class SamlAuthenticationIT extends ESRestTestCase {
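The @AwaitsFix annotation from the Lucene test framework mutes the whole suite: the randomized test runner skips annotated classes and methods instead of running them, and the bugUrl ties the mute to the linked tracking issue so the annotation can be located and removed once that issue is resolved.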