, ToXContentFragment {
public static final Version V_2_14_0 = new Version(2140099, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version V_2_14_1 = new Version(2140199, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version V_2_15_0 = new Version(2150099, org.apache.lucene.util.Version.LUCENE_9_10_0);
+ public static final Version V_2_16_0 = new Version(2160099, org.apache.lucene.util.Version.LUCENE_9_11_0);
public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_11_0);
public static final Version CURRENT = V_3_0_0;
diff --git a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
index af09a7aebba79..711f56c9f3e3b 100644
--- a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
+++ b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
@@ -78,6 +78,19 @@ public static Compressor compressor(final BytesReference bytes) {
return null;
}
+ /**
+ * @param bytes the bytes to check for compression
+ * @return the detected compressor, or the NoneCompressor if none is detected
+ */
+ public static Compressor compressorForWritable(final BytesReference bytes) {
+ for (Compressor compressor : registeredCompressors.values()) {
+ if (compressor.isCompressed(bytes)) {
+ return compressor;
+ }
+ }
+ return CompressorRegistry.none();
+ }
+
/** Decompress the provided {@link BytesReference}. */
public static BytesReference uncompress(BytesReference bytes) throws IOException {
Compressor compressor = compressor(bytes);
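Reviewer note: a minimal usage sketch (illustrative, not part of this change). Unlike compressor(bytes), the new compressorForWritable(bytes) never returns null; it falls back to the NoneCompressor, whose uncompress is a pass-through, so callers can decompress unconditionally:

    // Hypothetical caller: decompress when needed, pass through otherwise.
    static BytesReference readMaybeCompressed(BytesReference bytes) throws IOException {
        Compressor compressor = CompressorRegistry.compressorForWritable(bytes);
        return compressor.uncompress(bytes); // NoneCompressor returns the input unchanged
    }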
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
index a278b61894a65..e7b51c3389b52 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
@@ -104,6 +104,10 @@ public long getTotalValue() {
return endValue.get() - startValue;
}
+ public long getStartValue() {
+ return startValue;
+ }
+
@Override
public String toString() {
return String.valueOf(getTotalValue());
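Reviewer note: a short sketch of what the new accessor enables (names illustrative); a consumer holding a ResourceStats entry can now report the baseline alongside the delta:

    // Illustrative: expose both the baseline and the computed total.
    static void report(ResourceUsageInfo.ResourceStats stats) {
        long start = stats.getStartValue(); // value recorded when tracking began
        long total = stats.getTotalValue(); // endValue - startValue
    }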
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java
new file mode 100644
index 0000000000000..373cdbfa7e9a1
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java
@@ -0,0 +1,225 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.core.tasks.resourcetracker;
+
+import org.opensearch.common.annotation.PublicApi;
+import org.opensearch.core.ParseField;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.ConstructingObjectParser;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.opensearch.core.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Task resource usage information, along with minimal metadata about the task.
+ *
+ * Writeable TaskResourceInfo objects represent the resource usage of running
+ * tasks, which can be propagated to the coordinator node to infer query-level
+ * resource usage.
+ *
+ * @opensearch.api
+ */
+@PublicApi(since = "2.15.0")
+public class TaskResourceInfo implements Writeable, ToXContentObject {
+ private final String action;
+ private final long taskId;
+ private final long parentTaskId;
+ private final String nodeId;
+ private final TaskResourceUsage taskResourceUsage;
+
+ private static final ParseField ACTION = new ParseField("action");
+ private static final ParseField TASK_ID = new ParseField("taskId");
+ private static final ParseField PARENT_TASK_ID = new ParseField("parentTaskId");
+ private static final ParseField NODE_ID = new ParseField("nodeId");
+ private static final ParseField TASK_RESOURCE_USAGE = new ParseField("taskResourceUsage");
+
+ public TaskResourceInfo(
+ final String action,
+ final long taskId,
+ final long parentTaskId,
+ final String nodeId,
+ final TaskResourceUsage taskResourceUsage
+ ) {
+ this.action = action;
+ this.taskId = taskId;
+ this.parentTaskId = parentTaskId;
+ this.nodeId = nodeId;
+ this.taskResourceUsage = taskResourceUsage;
+ }
+
+ public static final ConstructingObjectParser<TaskResourceInfo, Void> PARSER = new ConstructingObjectParser<>(
+ "task_resource_info",
+ a -> new Builder().setAction((String) a[0])
+ .setTaskId((Long) a[1])
+ .setParentTaskId((Long) a[2])
+ .setNodeId((String) a[3])
+ .setTaskResourceUsage((TaskResourceUsage) a[4])
+ .build()
+ );
+
+ static {
+ PARSER.declareString(constructorArg(), ACTION);
+ PARSER.declareLong(constructorArg(), TASK_ID);
+ PARSER.declareLong(constructorArg(), PARENT_TASK_ID);
+ PARSER.declareString(constructorArg(), NODE_ID);
+ PARSER.declareObject(constructorArg(), TaskResourceUsage.PARSER, TASK_RESOURCE_USAGE);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(ACTION.getPreferredName(), this.action);
+ builder.field(TASK_ID.getPreferredName(), this.taskId);
+ builder.field(PARENT_TASK_ID.getPreferredName(), this.parentTaskId);
+ builder.field(NODE_ID.getPreferredName(), this.nodeId);
+ builder.startObject(TASK_RESOURCE_USAGE.getPreferredName());
+ this.taskResourceUsage.toXContent(builder, params);
+ builder.endObject();
+ builder.endObject();
+ return builder;
+ }
+
+ /**
+ * Builder for {@link TaskResourceInfo}
+ */
+ public static class Builder {
+ private TaskResourceUsage taskResourceUsage;
+ private String action;
+ private long taskId;
+ private long parentTaskId;
+ private String nodeId;
+
+ public Builder setTaskResourceUsage(final TaskResourceUsage taskResourceUsage) {
+ this.taskResourceUsage = taskResourceUsage;
+ return this;
+ }
+
+ public Builder setAction(final String action) {
+ this.action = action;
+ return this;
+ }
+
+ public Builder setTaskId(final long taskId) {
+ this.taskId = taskId;
+ return this;
+ }
+
+ public Builder setParentTaskId(final long parentTaskId) {
+ this.parentTaskId = parentTaskId;
+ return this;
+ }
+
+ public Builder setNodeId(final String nodeId) {
+ this.nodeId = nodeId;
+ return this;
+ }
+
+ public TaskResourceInfo build() {
+ return new TaskResourceInfo(action, taskId, parentTaskId, nodeId, taskResourceUsage);
+ }
+ }
+
+ /**
+ * Read task info from a stream.
+ *
+ * @param in StreamInput to read
+ * @return {@link TaskResourceInfo}
+ * @throws IOException IOException
+ */
+ public static TaskResourceInfo readFromStream(StreamInput in) throws IOException {
+ return new TaskResourceInfo.Builder().setAction(in.readString())
+ .setTaskId(in.readLong())
+ .setParentTaskId(in.readLong())
+ .setNodeId(in.readString())
+ .setTaskResourceUsage(TaskResourceUsage.readFromStream(in))
+ .build();
+ }
+
+ /**
+ * Get TaskResourceUsage
+ *
+ * @return taskResourceUsage
+ */
+ public TaskResourceUsage getTaskResourceUsage() {
+ return taskResourceUsage;
+ }
+
+ /**
+ * Get parent task id
+ *
+ * @return parent task id
+ */
+ public long getParentTaskId() {
+ return parentTaskId;
+ }
+
+ /**
+ * Get task id
+ * @return task id
+ */
+ public long getTaskId() {
+ return taskId;
+ }
+
+ /**
+ * Get node id
+ * @return node id
+ */
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ /**
+ * Get task action
+ * @return task action
+ */
+ public String getAction() {
+ return action;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(action);
+ out.writeLong(taskId);
+ out.writeLong(parentTaskId);
+ out.writeString(nodeId);
+ taskResourceUsage.writeTo(out);
+ }
+
+ @Override
+ public String toString() {
+ return Strings.toString(MediaTypeRegistry.JSON, this);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || obj.getClass() != TaskResourceInfo.class) {
+ return false;
+ }
+ TaskResourceInfo other = (TaskResourceInfo) obj;
+ return action.equals(other.action)
+ && taskId == other.taskId
+ && parentTaskId == other.parentTaskId
+ && Objects.equals(nodeId, other.nodeId)
+ && taskResourceUsage.equals(other.taskResourceUsage);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(action, taskId, parentTaskId, nodeId, taskResourceUsage);
+ }
+}
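Reviewer note: a minimal construction/serialization sketch for the new class (all values illustrative; assumes TaskResourceUsage's two-argument constructor takes CPU time in nanos and memory in bytes):

    // Hypothetical usage on a worker node: build the info, then round-trip it
    // over the transport layer so the coordinator can aggregate per-query usage.
    TaskResourceInfo info = new TaskResourceInfo.Builder()
        .setAction("indices:data/read/search")
        .setTaskId(42L)
        .setParentTaskId(-1L)
        .setNodeId("node-1")
        .setTaskResourceUsage(new TaskResourceUsage(1_000_000L, 2048L))
        .build();
    info.writeTo(out);                                   // out: any StreamOutput
    TaskResourceInfo copy = TaskResourceInfo.readFromStream(in);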
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
index c861c21f89fc5..bcf5c163cb91f 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
@@ -48,6 +48,11 @@ public Closeable createGauge(String name, String description, String unit, Supplier<Double> valueProvider, Tags tags) {
return metricsTelemetry.createGauge(name, description, unit, valueProvider, tags);
}
+ @Override
+ public Closeable createGauge(String name, String description, String unit, Supplier<TaggedMeasurement> value) {
+ return metricsTelemetry.createGauge(name, description, unit, value);
+ }
+
@Override
public void close() throws IOException {
metricsTelemetry.close();
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
index 3ab3dcf82c7a7..3dc212b1341cc 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
@@ -63,4 +63,16 @@ public interface MetricsRegistry extends Closeable {
*/
Closeable createGauge(String name, String description, String unit, Supplier<Double> valueProvider, Tags tags);
+ /**
+ * Creates an observable Gauge metric. The value provider will be called at a certain frequency
+ * to capture the value along with its tags.
+ *
+ * @param name name of the observable gauge.
+ * @param description any description about the metric.
+ * @param unit unit of the metric.
+ * @param value value provider returning the measurement along with its tags.
+ * @return closeable to dispose/close the Gauge metric.
+ */
+ Closeable createGauge(String name, String description, String unit, Supplier<TaggedMeasurement> value);
+
}
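Reviewer note: a usage sketch for the new overload (metric name and tags illustrative). The supplier is polled by the telemetry implementation, and each observation carries its own tags via TaggedMeasurement:

    // Hypothetical: report current heap usage, tagging each observation.
    static Closeable registerHeapGauge(MetricsRegistry registry) {
        // close() the returned Closeable when the owning component shuts down.
        return registry.createGauge(
            "jvm.heap.used",
            "current heap usage",
            "bytes",
            () -> TaggedMeasurement.create(
                Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(),
                Tags.create().addTag("pool", "heap")
            )
        );
    }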
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/TaggedMeasurement.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/TaggedMeasurement.java
new file mode 100644
index 0000000000000..707f2c79c62f2
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/TaggedMeasurement.java
@@ -0,0 +1,53 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * Observable measurement for asynchronous instruments, pairing a value with its tags.
+ * @opensearch.experimental
+ */
+@ExperimentalApi
+public final class TaggedMeasurement {
+ private final Double value;
+ private final Tags tags;
+
+ /**
+ * Factory method to create the {@link TaggedMeasurement} object.
+ * @param value value.
+ * @param tags tags to be added per value.
+ * @return the created TaggedMeasurement
+ */
+ public static TaggedMeasurement create(double value, Tags tags) {
+ return new TaggedMeasurement(value, tags);
+ }
+
+ private TaggedMeasurement(double value, Tags tags) {
+ this.value = value;
+ this.tags = tags;
+ }
+
+ /**
+ * Returns the value.
+ * @return value
+ */
+ public Double getValue() {
+ return value;
+ }
+
+ /**
+ * Returns the tags.
+ * @return tags
+ */
+ public Tags getTags() {
+ return tags;
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
index 9a913d25e872d..7bec136c42ba7 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -12,6 +12,7 @@
import org.opensearch.telemetry.metrics.Counter;
import org.opensearch.telemetry.metrics.Histogram;
import org.opensearch.telemetry.metrics.MetricsRegistry;
+import org.opensearch.telemetry.metrics.TaggedMeasurement;
import org.opensearch.telemetry.metrics.tags.Tags;
import java.io.Closeable;
@@ -52,6 +53,11 @@ public Closeable createGauge(String name, String description, String unit, Supplier<Double> valueProvider, Tags tags) {
return () -> {};
}
+ @Override
+ public Closeable createGauge(String name, String description, String unit, Supplier<TaggedMeasurement> value) {
+ return () -> {};
+ }
+
@Override
public void close() throws IOException {
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
index 872f697ade09e..e1506eecff6e9 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -79,4 +79,19 @@ public void testGauge() {
assertSame(mockCloseable, closeable);
}
+ @SuppressWarnings("unchecked")
+ public void testGaugeWithValueAndTagSupplier() {
+ Closeable mockCloseable = mock(Closeable.class);
+ when(defaultMeterRegistry.createGauge(any(String.class), any(String.class), any(String.class), any(Supplier.class))).thenReturn(
+ mockCloseable
+ );
+ Closeable closeable = defaultMeterRegistry.createGauge(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testObservableGauge",
+ "test observable gauge",
+ "ms",
+ () -> TaggedMeasurement.create(1.0, Tags.EMPTY)
+ );
+ assertSame(mockCloseable, closeable);
+ }
+
}
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-4be6531.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index ee00419f52066..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8752daf173a642ae02e081cc0398f2ce59278200
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.11.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..29aade3ad4298
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.11.0.jar.sha1
@@ -0,0 +1 @@
+5e21d20edee0712472e7c6f605c9d97aeecf16c0
\ No newline at end of file
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/10_derived_field_index_mapping_definition.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/10_derived_field_index_mapping_definition.yml
new file mode 100644
index 0000000000000..4f700c3b83e8f
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/10_derived_field_index_mapping_definition.yml
@@ -0,0 +1,421 @@
+"Test derived_field supported type using index mapping definition":
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ keyword:
+ type: keyword
+ long:
+ type: long
+ float:
+ type: float
+ double:
+ type: double
+ date:
+ type: date
+ geo:
+ type: geo_point
+ ip:
+ type: ip
+ boolean:
+ type: boolean
+ array_of_long:
+ type: long
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_long:
+ type: long
+ script: "emit(params._source[\"long\"])"
+ derived_float:
+ type: float
+ script: "emit(params._source[\"float\"])"
+ derived_double:
+ type: double
+ script: "emit(params._source[\"double\"])"
+ derived_date:
+ type: date
+ script: "emit(ZonedDateTime.parse(params._source[\"date\"]).toInstant().toEpochMilli())"
+ derived_geo:
+ type: geo_point
+ script: "emit(params._source[\"geo\"][0], params._source[\"geo\"][1])"
+ derived_ip:
+ type: ip
+ script: "emit(params._source[\"ip\"])"
+ derived_boolean:
+ type: boolean
+ script: "emit(params._source[\"boolean\"])"
+ derived_array_of_long:
+ type: long
+ script: "emit(params._source[\"array_of_long\"][0]);emit(params._source[\"array_of_long\"][1]);"
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_long: [1, 2],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_long: [2, 3],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ body: {
+ text: "peck of pickled peppers",
+ keyword: "baz",
+ long: -3,
+ float: -3.0,
+ double: -3.0,
+ date: "2017-01-03T00:00:00Z",
+ geo: [20.0, 40.0],
+ ip: "192.168.0.3",
+ boolean: true,
+ array_of_long: [3, 4],
+ json_field: "{\"keyword\":\"json_keyword3\",\"long\":30,\"float\":30.0,\"double\":30.0,\"date\":\"2021-03-01T00:00:00Z\",\"ip\":\"10.0.0.3\",\"boolean\":true, \"array_of_long\": [3, 4]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ body: {
+ text: "pickled peppers",
+ keyword: "qux",
+ long: 4,
+ float: 4.0,
+ double: 4.0,
+ date: "2017-01-04T00:00:00Z",
+ geo: [30.0, 50.0],
+ ip: "192.168.0.4",
+ boolean: false,
+ array_of_long: [4, 5],
+ json_field: "{\"keyword\":\"json_keyword4\",\"long\":40,\"float\":40.0,\"double\":40.0,\"date\":\"2021-04-01T00:00:00Z\",\"ip\":\"10.0.0.4\",\"boolean\":false, \"array_of_long\": [4, 5]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 5
+ body: {
+ text: "peppers",
+ keyword: "quux",
+ long: 5,
+ float: 5.0,
+ double: 5.0,
+ date: "2017-01-05T00:00:00Z",
+ geo: [40.0, 60.0],
+ ip: "192.168.0.5",
+ boolean: true,
+ array_of_long: [5, 6],
+ json_field: "{\"keyword\":\"json_keyword5\",\"long\":50,\"float\":50.0,\"double\":50.0,\"date\":\"2021-05-01T00:00:00Z\",\"ip\":\"10.0.0.5\",\"boolean\":true, \"array_of_long\": [5, 6]}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+
+ # Tests for derived_text
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ match_phrase:
+ derived_text:
+ query: "peter piper"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_keyword:
+ value: "foo"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_long:
+ gte: 1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_float:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_double:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_date:
+ gte: "2017-01-02"
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_geo
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ geo_distance:
+ distance: "20km"
+ derived_geo:
+ lat: 0.0
+ lon: 20.0
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_ip:
+ value: "192.168.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_object.keyword:
+ value: "json_keyword1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.long:
+ gte: 11
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.float:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.double:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.date:
+ gte: "2021-03-01"
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_object.ip:
+ value: "10.0.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_object.boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for query string
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ q: "derived_keyword:foo"
+
+ - match: { hits.total: 1 }
+
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ q: derived_object.keyword:json_keyword1
+
+ - match: { hits.total: 1 }
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/20_derived_field_put_mapping.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/20_derived_field_put_mapping.yml
new file mode 100644
index 0000000000000..0370fd94e8548
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/20_derived_field_put_mapping.yml
@@ -0,0 +1,123 @@
+---
+"Test create and update mapping for derived fields":
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+ - do:
+ indices.create:
+ index: test_index
+
+ - do:
+ indices.put_mapping:
+ index: test_index
+ body:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: keyword
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_date:
+ type: date
+ script: "emit(params._source[\"keyword\"])"
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ indices.get_mapping:
+ index: test_index
+
+ - match: {test_index.mappings.derived.derived_text.type: text}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.type: keyword}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_date.type: date}
+ - match: {test_index.mappings.derived.derived_object.type: object}
+ - match: {test_index.mappings.derived.derived_object.properties.keyword: keyword}
+ - match: {test_index.mappings.derived.derived_object.prefilter_field: json_field}
+
+
+ - do:
+ indices.put_mapping:
+ index: test_index
+ body:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: keyword
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_date:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_object:
+ type: object
+ properties:
+ keyword: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ format: "dd-MM-yyyy"
+ ignore_malformed: true
+
+ - do:
+ indices.get_mapping:
+ index: test_index
+
+ - match: {test_index.mappings.derived.derived_text.type: keyword}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.type: text}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_date.type: keyword}
+ - match: {test_index.mappings.derived.derived_object.type: object}
+ - match: {test_index.mappings.derived.derived_object.properties.keyword: text}
+ - match: {test_index.mappings.derived.derived_object.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_object.format: "dd-MM-yyyy"}
+ - match: {test_index.mappings.derived.derived_object.ignore_malformed: true}
+
+
+ - do:
+ indices.put_mapping:
+ index: test_index
+ body:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ ignore_malformed: false
+
+ - do:
+ indices.get_mapping:
+ index: test_index
+
+ - match: {test_index.mappings.derived.derived_text.type: keyword}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.type: text}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_date.type: keyword}
+ - match: {test_index.mappings.derived.derived_object.type: object}
+ - match: {test_index.mappings.derived.derived_object.properties.keyword: keyword}
+ - match: {test_index.mappings.derived.derived_object.prefilter_field: json_field}
+ - is_false: test_index.mappings.derived.derived_object.ignore_malformed
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/30_derived_field_search_definition.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/30_derived_field_search_definition.yml
new file mode 100644
index 0000000000000..bb619dce63010
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/30_derived_field_search_definition.yml
@@ -0,0 +1,489 @@
+"Test derived_field supported type using search definition":
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ keyword:
+ type: keyword
+ long:
+ type: long
+ float:
+ type: float
+ double:
+ type: double
+ date:
+ type: date
+ geo:
+ type: geo_point
+ ip:
+ type: ip
+ boolean:
+ type: boolean
+ array_of_long:
+ type: long
+ json_field:
+ type: text
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_long: [1, 2],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_long: [2, 3],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ body: {
+ text: "peck of pickled peppers",
+ keyword: "baz",
+ long: -3,
+ float: -3.0,
+ double: -3.0,
+ date: "2017-01-03T00:00:00Z",
+ geo: [20.0, 40.0],
+ ip: "192.168.0.3",
+ boolean: true,
+ array_of_long: [3, 4],
+ json_field: "{\"keyword\":\"json_keyword3\",\"long\":30,\"float\":30.0,\"double\":30.0,\"date\":\"2021-03-01T00:00:00Z\",\"ip\":\"10.0.0.3\",\"boolean\":true, \"array_of_long\": [3, 4]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ body: {
+ text: "pickled peppers",
+ keyword: "qux",
+ long: 4,
+ float: 4.0,
+ double: 4.0,
+ date: "2017-01-04T00:00:00Z",
+ geo: [30.0, 50.0],
+ ip: "192.168.0.4",
+ boolean: false,
+ array_of_long: [4, 5],
+ json_field: "{\"keyword\":\"json_keyword4\",\"long\":40,\"float\":40.0,\"double\":40.0,\"date\":\"2021-04-01T00:00:00Z\",\"ip\":\"10.0.0.4\",\"boolean\":false, \"array_of_long\": [4, 5]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 5
+ body: {
+ text: "peppers",
+ keyword: "quux",
+ long: 5,
+ float: 5.0,
+ double: 5.0,
+ date: "2017-01-05T00:00:00Z",
+ geo: [40.0, 60.0],
+ ip: "192.168.0.5",
+ boolean: true,
+ array_of_long: [5, 6],
+ json_field: "{\"keyword\":\"json_keyword5\",\"long\":50,\"float\":50.0,\"double\":50.0,\"date\":\"2021-05-01T00:00:00Z\",\"ip\":\"10.0.0.5\",\"boolean\":true, \"array_of_long\": [5, 6]}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+
+ # Tests for derived_text
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ query:
+ match_phrase:
+ derived_text:
+ query: "peter piper"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ query:
+ term:
+ derived_keyword:
+ value: "foo"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_long:
+ type: long
+ script: "emit(params._source[\"long\"])"
+ query:
+ range:
+ derived_long:
+ gte: 1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_float:
+ type: float
+ script: "emit(params._source[\"float\"])"
+ query:
+ range:
+ derived_float:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_double:
+ type: double
+ script: "emit(params._source[\"double\"])"
+ query:
+ range:
+ derived_double:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_date:
+ type: date
+ script: "emit(ZonedDateTime.parse(params._source[\"date\"]).toInstant().toEpochMilli())"
+ query:
+ range:
+ derived_date:
+ gte: "2017-01-02"
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_geo
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_geo:
+ type: geo_point
+ script: "emit(params._source[\"geo\"][0], params._source[\"geo\"][1])"
+ query:
+ geo_distance:
+ distance: "20km"
+ derived_geo:
+ lat: 0.0
+ lon: 20.0
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_ip:
+ type: ip
+ script: "emit(params._source[\"ip\"])"
+ query:
+ term:
+ derived_ip:
+ value: "192.168.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_boolean:
+ type: boolean
+ script: "emit(params._source[\"boolean\"])"
+ query:
+ term:
+ derived_boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_array_of_long:
+ type: long
+ script: "emit(params._source[\"array_of_long\"][0]);emit(params._source[\"array_of_long\"][1]);"
+ query:
+ range:
+ derived_array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ term:
+ derived_object.keyword:
+ value: "json_keyword1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.long:
+ gte: 11
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.float:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.double:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.date:
+ gte: "2021-03-01"
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ term:
+ derived_object.ip:
+ value: "10.0.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ term:
+ derived_object.boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for query string
+ - do:
+ search:
+ body:
+ derived:
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ rest_total_hits_as_int: true
+ index: test
+ q: "derived_keyword:foo"
+
+ - match: { hits.total: 1 }
+
+ - do:
+ search:
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ rest_total_hits_as_int: true
+ index: test
+ q: derived_object.keyword:json_keyword1
+
+ - match: { hits.total: 1 }
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/40_derived_field_fetch_and_highlight.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/40_derived_field_fetch_and_highlight.yml
new file mode 100644
index 0000000000000..52a897c341419
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/40_derived_field_fetch_and_highlight.yml
@@ -0,0 +1,279 @@
+setup:
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+
+---
+"Test basic field retrieval":
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ keyword:
+ type: keyword
+ long:
+ type: long
+ float:
+ type: float
+ double:
+ type: double
+ date:
+ type: date
+ geo:
+ type: geo_point
+ ip:
+ type: ip
+ boolean:
+ type: boolean
+ array_of_long:
+ type: long
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_long:
+ type: long
+ script: "emit(params._source[\"long\"])"
+ derived_float:
+ type: float
+ script: "emit(params._source[\"float\"])"
+ derived_double:
+ type: double
+ script: "emit(params._source[\"double\"])"
+ derived_date:
+ type: date
+ script: "emit(ZonedDateTime.parse(params._source[\"date\"]).toInstant().toEpochMilli())"
+ derived_geo:
+ type: geo_point
+ script: "emit(params._source[\"geo\"][0], params._source[\"geo\"][1])"
+ derived_ip:
+ type: ip
+ script: "emit(params._source[\"ip\"])"
+ derived_boolean:
+ type: boolean
+ script: "emit(params._source[\"boolean\"])"
+ derived_array_of_long:
+ type: long
+ script: "emit(params._source[\"array_of_long\"][0]);emit(params._source[\"array_of_long\"][1]);"
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ format: "yyyy-MM-dd"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_long: [1, 2],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_long: [2, 3],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+
+ - do:
+ search:
+ index: test
+ body:
+ fields: [derived_text, derived_keyword, derived_long, derived_float, derived_double, derived_date, derived_geo, derived_ip, derived_boolean, derived_array_of_long,
+ derived_object, derived_object.keyword, derived_object.long, derived_object.float, derived_object.double, derived_object.date, derived_object.ip, derived_object.boolean, derived_object.array_of_long]
+
+ - is_true: hits.hits.0._id
+ - is_true: hits.hits.0._source
+
+ - match: { hits.hits.0.fields.derived_text.0: "peter piper" }
+ - match: { hits.hits.0.fields.derived_keyword.0: foo }
+ - match: { hits.hits.0.fields.derived_long.0: 1 }
+ - match: { hits.hits.0.fields.derived_float.0: 1.0 }
+ - match: { hits.hits.0.fields.derived_double.0: 1 }
+ - match: { hits.hits.0.fields.derived_date.0: 2017-01-01T00:00:00.000Z }
+ - match: { hits.hits.0.fields.derived_geo.0.lat: 0.0 }
+ - match: { hits.hits.0.fields.derived_geo.0.lon: 20.0 }
+ - match: { hits.hits.0.fields.derived_ip.0: 192.168.0.1 }
+ - match: { hits.hits.0.fields.derived_array_of_long.0: 1 }
+ - match: { hits.hits.0.fields.derived_array_of_long.1: 2 }
+ - match: { hits.hits.0.fields.derived_object.0: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}" }
+ - match: { hits.hits.0.fields.derived_object\.keyword.0: json_keyword1 }
+ - match: { hits.hits.0.fields.derived_object\.long.0: 10 }
+ - match: { hits.hits.0.fields.derived_object\.float.0: 10.0 }
+ - match: { hits.hits.0.fields.derived_object\.double.0: 10.0 }
+ - match: { hits.hits.0.fields.derived_object\.date.0: 2021-01-01 }
+ - match: { hits.hits.0.fields.derived_object\.ip.0: 10.0.0.1 }
+ - match: { hits.hits.0.fields.derived_object\.boolean.0: true }
+ - match: { hits.hits.0.fields.derived_object\.array_of_long.0: 1 }
+ - match: { hits.hits.0.fields.derived_object\.array_of_long.1: 2 }
+
+ - match: { hits.hits.1.fields.derived_text.0: "piper picked a peck" }
+ - match: { hits.hits.1.fields.derived_keyword.0: bar }
+ - match: { hits.hits.1.fields.derived_long.0: 2 }
+ - match: { hits.hits.1.fields.derived_float.0: 2.0 }
+ - match: { hits.hits.1.fields.derived_double.0: 2 }
+ - match: { hits.hits.1.fields.derived_date.0: 2017-01-02T00:00:00.000Z }
+ - match: { hits.hits.1.fields.derived_geo.0.lat: 10.0 }
+ - match: { hits.hits.1.fields.derived_geo.0.lon: 30.0 }
+ - match: { hits.hits.1.fields.derived_ip.0: 192.168.0.2 }
+ - match: { hits.hits.1.fields.derived_array_of_long.0: 2 }
+ - match: { hits.hits.1.fields.derived_array_of_long.1: 3 }
+ - match: { hits.hits.1.fields.derived_object.0: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}" }
+ - match: { hits.hits.1.fields.derived_object\.keyword.0: json_keyword2 }
+ - match: { hits.hits.1.fields.derived_object\.long.0: 20 }
+ - match: { hits.hits.1.fields.derived_object\.float.0: 20.0 }
+ - match: { hits.hits.1.fields.derived_object\.double.0: 20.0 }
+ - match: { hits.hits.1.fields.derived_object\.date.0: 2021-02-01 }
+ - match: { hits.hits.1.fields.derived_object\.ip.0: 10.0.0.2 }
+ - match: { hits.hits.1.fields.derived_object\.boolean.0: false }
+ - match: { hits.hits.1.fields.derived_object\.array_of_long.0: 2 }
+ - match: { hits.hits.1.fields.derived_object\.array_of_long.1: 3 }
+
+
+---
+"Test highlight":
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ array_of_text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_array_of_text:
+ type: text
+ script: "emit(params._source[\"array_of_text\"][0]);emit(params._source[\"array_of_text\"][1]);"
+ derived_object:
+ type: object
+ properties:
+ array_of_text: text
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_text: ["The quick brown fox is brown", "The quick brown fox is black"],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_text\": [\"The quick brown fox is brown\", \"The quick brown fox is black\"]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_text: ["The quick brown fox is brown", "The quick brown fox is black"],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_text\": [\"The quick brown fox is brown\", \"The quick brown fox is black\"]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ body: { "query" : {"multi_match" : { "query" : "piper", "fields" : [ "derived_text"] } },
+ "fields": [derived_text],
+ "highlight" : { "type" : "unified", "fields" : { "derived_text" : {} } }
+ }
+
+ - match: {hits.hits.0.highlight.derived_text.0: "peter piper"}
+
+
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ body: { "query" : {"multi_match" : { "query" : "quick brown", "fields" : [ "derived_array_of_text"] } },
+ "fields": [derived_array_of_text],
+ "highlight" : { "type" : "unified", "fields" : { "derived_array_of_text" : {} } }
+ }
+
+ - match: {hits.hits.0.highlight.derived_array_of_text.0: "The quick brown fox is brown"}
+
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ match_phrase:
+ derived_object.array_of_text:
+ query: "quick brown"
+ highlight:
+ type: unified
+ fields:
+ derived_object.array_of_text: {}
+
+ - match: {hits.hits.0.highlight.derived_object\.array_of_text.0: "The quick brown fox is brown"}
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/50_derived_field_default_analyzer.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/50_derived_field_default_analyzer.yml
new file mode 100644
index 0000000000000..e10c9cb3c133f
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/50_derived_field_default_analyzer.yml
@@ -0,0 +1,105 @@
+---
+"Test default index analyzer simple is applied on derived fields":
+ - do:
+ indices.create:
+ index: test
+ body:
+ settings:
+ index.analysis.analyzer.default.type: simple
+ mappings:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_object:
+ type: object
+ properties:
+ array_of_text: text
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "Email: example@example.com, Visit https://example.com for more info.",
+ json_field: "{\"array_of_text\": [\"Email: example@example.com, Visit https://example.com for more info.\", \"Email: example@example.com, Visit https://example.com for more info.\"]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+ - do:
+ search:
+ index: test
+ q: "derived_text:example.com"
+ analyzer: standard
+
+ - match: { hits.total.value: 0 }
+
+ - do:
+ search:
+ index: test
+ q: "derived_text:example.com"
+ analyzer: simple
+
+ - match: { hits.total.value: 1 }
+
+---
+"Test default index analyzer standard is applied on derived fields":
+ - do:
+ indices.create:
+ index: test
+ body:
+ settings:
+ index.analysis.analyzer.default.type: standard
+ mappings:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_object:
+ type: object
+ properties:
+ array_of_text: text
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "Email: example@example.com, Visit https://example.com for more info.",
+ json_field: "{\"array_of_text\": [\"Email: example@example.com, Visit https://example.com for more info.\", \"Email: example@example.com, Visit https://example.com for more info.\"]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+ - do:
+ search:
+ index: test
+ q: "derived_object.array_of_text:example.com"
+ analyzer: standard
+
+ - match: { hits.total.value: 1 }
+
+ - do:
+ search:
+ index: test
+ q: "derived_object.array_of_text:example.com"
+ analyzer: simple
+
+ - match: { hits.total.value: 1 }
diff --git a/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java b/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java
index 09fd52ff65c66..6d5020336eb0b 100644
--- a/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java
+++ b/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java
@@ -54,6 +54,7 @@
import org.opensearch.rest.action.admin.indices.RestRefreshAction;
import org.opensearch.rest.action.admin.indices.RestUpdateSettingsAction;
import org.opensearch.rest.action.document.RestBulkAction;
+import org.opensearch.rest.action.document.RestBulkStreamingAction;
import org.opensearch.rest.action.document.RestDeleteAction;
import org.opensearch.rest.action.document.RestGetAction;
import org.opensearch.rest.action.document.RestIndexAction;
@@ -127,6 +128,7 @@ public List<RestHandler> getRestHandlers(
new OpenSearchDashboardsWrappedRestHandler(new RestMultiGetAction(settings)),
new OpenSearchDashboardsWrappedRestHandler(new RestSearchAction()),
new OpenSearchDashboardsWrappedRestHandler(new RestBulkAction(settings)),
+ new OpenSearchDashboardsWrappedRestHandler(new RestBulkStreamingAction(settings)),
new OpenSearchDashboardsWrappedRestHandler(new RestDeleteAction()),
new OpenSearchDashboardsWrappedRestHandler(new RestDeleteByQueryAction()),
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-4be6531.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 04338d8933590..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-12630ff9c56e2a372ba57f519c579ff9e728208a
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..6f0501d3312ae
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0.jar.sha1
@@ -0,0 +1 @@
+5c7f2d8eab0fca3fdc3d3e57a7f48a335dc7ac33
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-4be6531.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index b8da0dacfe9f1..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-752bfc61c7829be6c27d9c1764250196e2c6b06b
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..25031381c9cb3
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0.jar.sha1
@@ -0,0 +1 @@
+efcf65dda1b4e9d7e83926fd5895a47e491cbf29
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-4be6531.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index b58adc03938f3..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5ca56d42b24498a226cf91f48b94e010b6af5fe2
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..e27d45b217dad
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0.jar.sha1
@@ -0,0 +1 @@
+59599d7b8bed2e6bd27d0dad7935c078b98c39cc
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-4be6531.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index dea962647d995..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8eb59a89aa8984457798ccffb8e97e5351bebc1f
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..ad5473865537d
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0.jar.sha1
@@ -0,0 +1 @@
+e55f83bb373ac139e313f64e80afe1eb0a75b8c0
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-4be6531.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 1259b95a789a5..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-851c1bd99eaef368e84335853dd448e4f56cdbc8
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..68abd162e7266
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0.jar.sha1
@@ -0,0 +1 @@
+1be59d91c45a4de069611fb7f8aa3e8fd26020ec
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-4be6531.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 8c0d8fd278b89..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-63647085d41ae231733580c20a498ce7c9134ce5
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..c5f1521ec3769
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0.jar.sha1
@@ -0,0 +1 @@
+d5b5922acf3743b5a0c542959dd93fca8be333a7
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-4be6531.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 0eb1fb5f2b31f..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a3ba7dd03b1df9efed08eb544689d51d2be22aa5
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..b676ca507467a
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0.jar.sha1
@@ -0,0 +1 @@
+50fd7b471cbdd6648c4972169f3fc67fae9db7f6
\ No newline at end of file
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java
index 22831c3e0f8ba..bba676436c39a 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java
@@ -111,7 +111,15 @@ public List<Setting<?>> getSettings() {
QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED,
QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE,
QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE,
- QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS
+ QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS,
+ QueryInsightsSettings.TOP_N_CPU_QUERIES_ENABLED,
+ QueryInsightsSettings.TOP_N_CPU_QUERIES_SIZE,
+ QueryInsightsSettings.TOP_N_CPU_QUERIES_WINDOW_SIZE,
+ QueryInsightsSettings.TOP_N_CPU_EXPORTER_SETTINGS,
+ QueryInsightsSettings.TOP_N_MEMORY_QUERIES_ENABLED,
+ QueryInsightsSettings.TOP_N_MEMORY_QUERIES_SIZE,
+ QueryInsightsSettings.TOP_N_MEMORY_QUERIES_WINDOW_SIZE,
+ QueryInsightsSettings.TOP_N_MEMORY_EXPORTER_SETTINGS
);
}
}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/exporter/QueryInsightsExporterFactory.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/exporter/QueryInsightsExporterFactory.java
index 7324590c9f582..016911761a3d0 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/exporter/QueryInsightsExporterFactory.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/exporter/QueryInsightsExporterFactory.java
@@ -19,7 +19,7 @@
import java.util.Locale;
import java.util.Set;
-import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.DEFAULT_TOP_N_LATENCY_QUERIES_INDEX_PATTERN;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.DEFAULT_TOP_N_QUERIES_INDEX_PATTERN;
import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.DEFAULT_TOP_QUERIES_EXPORTER_TYPE;
import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.EXPORTER_TYPE;
import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.EXPORT_INDEX;
@@ -71,7 +71,7 @@ public void validateExporterConfig(final Settings settings) throws IllegalArgume
}
switch (type) {
case LOCAL_INDEX:
- final String indexPattern = settings.get(EXPORT_INDEX, DEFAULT_TOP_N_LATENCY_QUERIES_INDEX_PATTERN);
+ final String indexPattern = settings.get(EXPORT_INDEX, DEFAULT_TOP_N_QUERIES_INDEX_PATTERN);
if (indexPattern.length() == 0) {
throw new IllegalArgumentException("Empty index pattern configured for the exporter");
}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java
index 9ec8673147c38..a1f810ad5987c 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java
@@ -14,23 +14,27 @@
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchRequestContext;
import org.opensearch.action.search.SearchRequestOperationsListener;
+import org.opensearch.action.search.SearchTask;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
+import org.opensearch.core.tasks.resourcetracker.TaskResourceInfo;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.plugin.insights.core.service.QueryInsightsService;
import org.opensearch.plugin.insights.rules.model.Attribute;
import org.opensearch.plugin.insights.rules.model.MetricType;
import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.tasks.Task;
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED;
-import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE;
-import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.getTopNEnabledSetting;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.getTopNSizeSetting;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.getTopNWindowSizeSetting;
/**
* The listener for query insights services.
@@ -45,6 +49,7 @@ public final class QueryInsightsListener extends SearchRequestOperationsListener
private static final Logger log = LogManager.getLogger(QueryInsightsListener.class);
private final QueryInsightsService queryInsightsService;
+ private final ClusterService clusterService;
/**
* Constructor for QueryInsightsListener
@@ -54,26 +59,32 @@ public final class QueryInsightsListener extends SearchRequestOperationsListener
*/
@Inject
public QueryInsightsListener(final ClusterService clusterService, final QueryInsightsService queryInsightsService) {
+ this.clusterService = clusterService;
this.queryInsightsService = queryInsightsService;
- clusterService.getClusterSettings()
- .addSettingsUpdateConsumer(TOP_N_LATENCY_QUERIES_ENABLED, v -> this.setEnableTopQueries(MetricType.LATENCY, v));
- clusterService.getClusterSettings()
- .addSettingsUpdateConsumer(
- TOP_N_LATENCY_QUERIES_SIZE,
- v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).setTopNSize(v),
- v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).validateTopNSize(v)
- );
- clusterService.getClusterSettings()
- .addSettingsUpdateConsumer(
- TOP_N_LATENCY_QUERIES_WINDOW_SIZE,
- v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).setWindowSize(v),
- v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).validateWindowSize(v)
- );
- this.setEnableTopQueries(MetricType.LATENCY, clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_ENABLED));
- this.queryInsightsService.getTopQueriesService(MetricType.LATENCY)
- .setTopNSize(clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_SIZE));
- this.queryInsightsService.getTopQueriesService(MetricType.LATENCY)
- .setWindowSize(clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_WINDOW_SIZE));
+ // Register dynamic setting consumers for each top n queries metric type: the enabled flag, top n size and window size.
+ // Expected metric types are latency, CPU and memory.
+ for (MetricType type : MetricType.allMetricTypes()) {
+ clusterService.getClusterSettings()
+ .addSettingsUpdateConsumer(getTopNEnabledSetting(type), v -> this.setEnableTopQueries(type, v));
+ clusterService.getClusterSettings()
+ .addSettingsUpdateConsumer(
+ getTopNSizeSetting(type),
+ v -> this.queryInsightsService.setTopNSize(type, v),
+ v -> this.queryInsightsService.validateTopNSize(type, v)
+ );
+ clusterService.getClusterSettings()
+ .addSettingsUpdateConsumer(
+ getTopNWindowSizeSetting(type),
+ v -> this.queryInsightsService.setWindowSize(type, v),
+ v -> this.queryInsightsService.validateWindowSize(type, v)
+ );
+
+ this.setEnableTopQueries(type, clusterService.getClusterSettings().get(getTopNEnabledSetting(type)));
+ this.queryInsightsService.validateTopNSize(type, clusterService.getClusterSettings().get(getTopNSizeSetting(type)));
+ this.queryInsightsService.setTopNSize(type, clusterService.getClusterSettings().get(getTopNSizeSetting(type)));
+ this.queryInsightsService.validateWindowSize(type, clusterService.getClusterSettings().get(getTopNWindowSizeSetting(type)));
+ this.queryInsightsService.setWindowSize(type, clusterService.getClusterSettings().get(getTopNWindowSizeSetting(type)));
+ }
}
/**
@@ -123,6 +134,27 @@ public void onRequestStart(SearchRequestContext searchRequestContext) {}
@Override
public void onRequestEnd(final SearchPhaseContext context, final SearchRequestContext searchRequestContext) {
+ constructSearchQueryRecord(context, searchRequestContext);
+ }
+
+ @Override
+ public void onRequestFailure(final SearchPhaseContext context, final SearchRequestContext searchRequestContext) {
+ constructSearchQueryRecord(context, searchRequestContext);
+ }
+
+ private void constructSearchQueryRecord(final SearchPhaseContext context, final SearchRequestContext searchRequestContext) {
+ SearchTask searchTask = context.getTask();
+ List<TaskResourceInfo> tasksResourceUsages = searchRequestContext.getPhaseResourceUsage();
+ tasksResourceUsages.add(
+ new TaskResourceInfo(
+ searchTask.getAction(),
+ searchTask.getId(),
+ searchTask.getParentTaskId().getId(),
+ clusterService.localNode().getId(),
+ searchTask.getTotalResourceStats()
+ )
+ );
+
final SearchRequest request = context.getRequest();
try {
Map<MetricType, Number> measurements = new HashMap<>();
@@ -132,16 +164,39 @@ public void onRequestEnd(final SearchPhaseContext context, final SearchRequestCo
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - searchRequestContext.getAbsoluteStartNanos())
);
}
+ if (queryInsightsService.isCollectionEnabled(MetricType.CPU)) {
+ measurements.put(
+ MetricType.CPU,
+ tasksResourceUsages.stream().map(a -> a.getTaskResourceUsage().getCpuTimeInNanos()).mapToLong(Long::longValue).sum()
+ );
+ }
+ if (queryInsightsService.isCollectionEnabled(MetricType.MEMORY)) {
+ measurements.put(
+ MetricType.MEMORY,
+ tasksResourceUsages.stream().map(a -> a.getTaskResourceUsage().getMemoryInBytes()).mapToLong(Long::longValue).sum()
+ );
+ }
Map<Attribute, Object> attributes = new HashMap<>();
attributes.put(Attribute.SEARCH_TYPE, request.searchType().toString().toLowerCase(Locale.ROOT));
attributes.put(Attribute.SOURCE, request.source().toString(FORMAT_PARAMS));
attributes.put(Attribute.TOTAL_SHARDS, context.getNumShards());
attributes.put(Attribute.INDICES, request.indices());
attributes.put(Attribute.PHASE_LATENCY_MAP, searchRequestContext.phaseTookMap());
+ attributes.put(Attribute.TASK_RESOURCE_USAGES, tasksResourceUsages);
+
+ Map<String, Object> labels = new HashMap<>();
+ // Retrieve the user-provided label if one exists
+ String userProvidedLabel = context.getTask().getHeader(Task.X_OPAQUE_ID);
+ if (userProvidedLabel != null) {
+ labels.put(Task.X_OPAQUE_ID, userProvidedLabel);
+ }
+ attributes.put(Attribute.LABELS, labels);
+ // construct SearchQueryRecord from attributes and measurements
SearchQueryRecord record = new SearchQueryRecord(request.getOrCreateAbsoluteStartMillis(), measurements, attributes);
queryInsightsService.addRecord(record);
} catch (Exception e) {
log.error(String.format(Locale.ROOT, "fail to ingest query insight data, error: %s", e));
}
}
+
}
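Both onRequestEnd and onRequestFailure now funnel through constructSearchQueryRecord, which appends the coordinator SearchTask's own totals to the per-phase usages before summing CPU and memory across all tasks. A minimal sketch of that aggregation, assuming only the TaskResourceInfo accessors visible in the diff above:

    import org.opensearch.core.tasks.resourcetracker.TaskResourceInfo;

    import java.util.List;

    final class ResourceUsageSums {
        // Mirrors the MetricType.CPU / MetricType.MEMORY measurements computed
        // in constructSearchQueryRecord: sum the usage reported by every task.
        static long totalCpuNanos(List<TaskResourceInfo> usages) {
            return usages.stream().mapToLong(u -> u.getTaskResourceUsage().getCpuTimeInNanos()).sum();
        }

        static long totalMemoryBytes(List<TaskResourceInfo> usages) {
            return usages.stream().mapToLong(u -> u.getTaskResourceUsage().getMemoryInBytes()).sum();
        }
    }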
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java
index a83bb2094f165..c63430a1a726c 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java
@@ -12,6 +12,8 @@
import org.opensearch.common.inject.Inject;
import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.unit.TimeValue;
import org.opensearch.plugin.insights.core.exporter.QueryInsightsExporterFactory;
import org.opensearch.plugin.insights.rules.model.MetricType;
import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
@@ -27,7 +29,7 @@
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;
-import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.getExporterSettings;
/**
* Service responsible for gathering, analyzing, storing and exporting
@@ -86,11 +88,13 @@ public QueryInsightsService(final ClusterSettings clusterSettings, final ThreadP
enableCollect.put(metricType, false);
topQueriesServices.put(metricType, new TopQueriesService(metricType, threadPool, queryInsightsExporterFactory));
}
- clusterSettings.addSettingsUpdateConsumer(
- TOP_N_LATENCY_EXPORTER_SETTINGS,
- (settings -> getTopQueriesService(MetricType.LATENCY).setExporter(settings)),
- (settings -> getTopQueriesService(MetricType.LATENCY).validateExporterConfig(settings))
- );
+ for (MetricType type : MetricType.allMetricTypes()) {
+ clusterSettings.addSettingsUpdateConsumer(
+ getExporterSettings(type),
+ (settings -> setExporter(type, settings)),
+ (settings -> validateExporterConfig(type, settings))
+ );
+ }
}
/**
@@ -177,6 +181,78 @@ public boolean isEnabled() {
return false;
}
+ /**
+ * Validate the window size config for a metricType
+ *
+ * @param type {@link MetricType}
+ * @param windowSize {@link TimeValue}
+ */
+ public void validateWindowSize(final MetricType type, final TimeValue windowSize) {
+ if (topQueriesServices.containsKey(type)) {
+ topQueriesServices.get(type).validateWindowSize(windowSize);
+ }
+ }
+
+ /**
+ * Set window size for a metricType
+ *
+ * @param type {@link MetricType}
+ * @param windowSize {@link TimeValue}
+ */
+ public void setWindowSize(final MetricType type, final TimeValue windowSize) {
+ if (topQueriesServices.containsKey(type)) {
+ topQueriesServices.get(type).setWindowSize(windowSize);
+ }
+ }
+
+ /**
+ * Validate the top n size config for a metricType
+ *
+ * @param type {@link MetricType}
+ * @param topNSize top n size
+ */
+ public void validateTopNSize(final MetricType type, final int topNSize) {
+ if (topQueriesServices.containsKey(type)) {
+ topQueriesServices.get(type).validateTopNSize(topNSize);
+ }
+ }
+
+ /**
+ * Set the top n size config for a metricType
+ *
+ * @param type {@link MetricType}
+ * @param topNSize top n size
+ */
+ public void setTopNSize(final MetricType type, final int topNSize) {
+ if (topQueriesServices.containsKey(type)) {
+ topQueriesServices.get(type).setTopNSize(topNSize);
+ }
+ }
+
+ /**
+ * Set the exporter config for a metricType
+ *
+ * @param type {@link MetricType}
+ * @param settings exporter settings
+ */
+ public void setExporter(final MetricType type, final Settings settings) {
+ if (topQueriesServices.containsKey(type)) {
+ topQueriesServices.get(type).setExporter(settings);
+ }
+ }
+
+ /**
+ * Validate the exporter config for a metricType
+ *
+ * @param type {@link MetricType}
+ * @param settings exporter settings
+ */
+ public void validateExporterConfig(final MetricType type, final Settings settings) {
+ if (topQueriesServices.containsKey(type)) {
+ topQueriesServices.get(type).validateExporterConfig(settings);
+ }
+ }
+
@Override
protected void doStart() {
if (isEnabled()) {
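Each new delegator checks topQueriesServices.containsKey(type) before forwarding, so a settings consumer registered for a metric type without a backing TopQueriesService becomes a no-op rather than a null dereference. The cluster-settings flow is validate-then-set; a hedged sketch of the order of operations (the helper name is illustrative):

    // Illustrative only: cluster settings invoke the validator before the
    // setter, so an invalid top_n_size never reaches TopQueriesService.
    static void applyTopNSize(QueryInsightsService service, MetricType type, int newSize) {
        service.validateTopNSize(type, newSize); // throws on invalid input
        service.setTopNSize(type, newSize);      // runs only after validation passes
    }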
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java
index ff90edf1ec33d..c21b89be4dcca 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java
@@ -35,7 +35,7 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.DEFAULT_TOP_N_LATENCY_QUERIES_INDEX_PATTERN;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.DEFAULT_TOP_N_QUERIES_INDEX_PATTERN;
import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.DEFAULT_TOP_QUERIES_EXPORTER_TYPE;
import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.EXPORTER_TYPE;
import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.EXPORT_INDEX;
@@ -218,10 +218,7 @@ public void setExporter(final Settings settings) {
if (settings.get(EXPORTER_TYPE) != null) {
SinkType expectedType = SinkType.parse(settings.get(EXPORTER_TYPE, DEFAULT_TOP_QUERIES_EXPORTER_TYPE));
if (exporter != null && expectedType == SinkType.getSinkTypeFromExporter(exporter)) {
- queryInsightsExporterFactory.updateExporter(
- exporter,
- settings.get(EXPORT_INDEX, DEFAULT_TOP_N_LATENCY_QUERIES_INDEX_PATTERN)
- );
+ queryInsightsExporterFactory.updateExporter(exporter, settings.get(EXPORT_INDEX, DEFAULT_TOP_N_QUERIES_INDEX_PATTERN));
} else {
try {
queryInsightsExporterFactory.closeExporter(this.exporter);
@@ -230,7 +227,7 @@ public void setExporter(final Settings settings) {
}
this.exporter = queryInsightsExporterFactory.createExporter(
SinkType.parse(settings.get(EXPORTER_TYPE, DEFAULT_TOP_QUERIES_EXPORTER_TYPE)),
- settings.get(EXPORT_INDEX, DEFAULT_TOP_N_LATENCY_QUERIES_INDEX_PATTERN)
+ settings.get(EXPORT_INDEX, DEFAULT_TOP_N_QUERIES_INDEX_PATTERN)
);
}
} else {
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java
index c1d17edf9ff14..dcdb085fdc6fa 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java
@@ -43,7 +43,15 @@ public enum Attribute {
/**
* The node id for this request
*/
- NODE_ID;
+ NODE_ID,
+ /**
+ * Tasks level resource usages in this request
+ */
+ TASK_RESOURCE_USAGES,
+ /**
+ * Custom search request labels
+ */
+ LABELS;
/**
* Read an Attribute from a StreamInput
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java
index cdd090fbf4804..4694c757f4ef2 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java
@@ -35,7 +35,7 @@ public enum MetricType implements Comparator<Number> {
/**
* JVM heap usage metric type
*/
- JVM;
+ MEMORY;
/**
* Read a MetricType from a StreamInput
@@ -93,10 +93,9 @@ public static Set<MetricType> allMetricTypes() {
public int compare(final Number a, final Number b) {
switch (this) {
case LATENCY:
- return Long.compare(a.longValue(), b.longValue());
- case JVM:
case CPU:
- return Double.compare(a.doubleValue(), b.doubleValue());
+ case MEMORY:
+ return Long.compare(a.longValue(), b.longValue());
}
return -1;
}
@@ -110,10 +109,9 @@ public int compare(final Number a, final Number b) {
Number parseValue(final Object o) {
switch (this) {
case LATENCY:
- return (Long) o;
- case JVM:
case CPU:
- return (Double) o;
+ case MEMORY:
+ return (Long) o;
default:
return (Number) o;
}
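With JVM renamed to MEMORY, every metric type now carries a long value, so both compare and parseValue collapse into the Long branch. A small package-internal sketch of the resulting semantics:

    // All three metric types now order values as longs.
    int latencyOrder = MetricType.LATENCY.compare(100L, 200L);  // negative: 100 < 200
    int memoryOrder = MetricType.MEMORY.compare(4096L, 1024L);  // positive: 4096 > 1024
    Number parsed = MetricType.CPU.parseValue(1_000_000L);      // remains a Long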
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java
index ddf614211bc41..7949b70a16db6 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java
@@ -8,7 +8,6 @@
package org.opensearch.plugin.insights.rules.transport.top_queries;
-import org.opensearch.OpenSearchException;
import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.nodes.TransportNodesAction;
@@ -21,7 +20,6 @@
import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction;
import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest;
import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse;
-import org.opensearch.plugin.insights.rules.model.MetricType;
import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportRequest;
@@ -29,7 +27,6 @@
import java.io.IOException;
import java.util.List;
-import java.util.Locale;
/**
* Transport action for cluster/node level top queries information.
@@ -81,17 +78,18 @@ protected TopQueriesResponse newResponse(
final List<TopQueries> responses,
final List<FailedNodeException> failures
) {
- if (topQueriesRequest.getMetricType() == MetricType.LATENCY) {
- return new TopQueriesResponse(
- clusterService.getClusterName(),
- responses,
- failures,
- clusterService.getClusterSettings().get(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE),
- MetricType.LATENCY
- );
- } else {
- throw new OpenSearchException(String.format(Locale.ROOT, "invalid metric type %s", topQueriesRequest.getMetricType()));
+ int size;
+ switch (topQueriesRequest.getMetricType()) {
+ case CPU:
+ size = clusterService.getClusterSettings().get(QueryInsightsSettings.TOP_N_CPU_QUERIES_SIZE);
+ break;
+ case MEMORY:
+ size = clusterService.getClusterSettings().get(QueryInsightsSettings.TOP_N_MEMORY_QUERIES_SIZE);
+ break;
+ default:
+ size = clusterService.getClusterSettings().get(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE);
}
+ return new TopQueriesResponse(clusterService.getClusterName(), responses, failures, size, topQueriesRequest.getMetricType());
}
@Override
@@ -107,15 +105,10 @@ protected TopQueries newNodeResponse(final StreamInput in) throws IOException {
@Override
protected TopQueries nodeOperation(final NodeRequest nodeRequest) {
final TopQueriesRequest request = nodeRequest.request;
- if (request.getMetricType() == MetricType.LATENCY) {
- return new TopQueries(
- clusterService.localNode(),
- queryInsightsService.getTopQueriesService(MetricType.LATENCY).getTopQueriesRecords(true)
- );
- } else {
- throw new OpenSearchException(String.format(Locale.ROOT, "invalid metric type %s", request.getMetricType()));
- }
-
+ return new TopQueries(
+ clusterService.localNode(),
+ queryInsightsService.getTopQueriesService(request.getMetricType()).getTopQueriesRecords(true)
+ );
}
/**
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java
index b2e01062e334c..25309b5721792 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java
@@ -12,6 +12,7 @@
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.plugin.insights.core.exporter.SinkType;
+import org.opensearch.plugin.insights.rules.model.MetricType;
import java.util.Arrays;
import java.util.HashSet;
@@ -81,6 +82,10 @@ public class QueryInsightsSettings {
public static final String TOP_N_QUERIES_SETTING_PREFIX = "search.insights.top_queries";
/** Default prefix for top N queries by latency feature */
public static final String TOP_N_LATENCY_QUERIES_PREFIX = TOP_N_QUERIES_SETTING_PREFIX + ".latency";
+ /** Default prefix for top N queries by cpu feature */
+ public static final String TOP_N_CPU_QUERIES_PREFIX = TOP_N_QUERIES_SETTING_PREFIX + ".cpu";
+ /** Default prefix for top N queries by memory feature */
+ public static final String TOP_N_MEMORY_QUERIES_PREFIX = TOP_N_QUERIES_SETTING_PREFIX + ".memory";
/**
* Boolean setting for enabling top queries by latency.
*/
@@ -111,6 +116,66 @@ public class QueryInsightsSettings {
Setting.Property.Dynamic
);
+ /**
+ * Boolean setting for enabling top queries by cpu.
+ */
+ public static final Setting<Boolean> TOP_N_CPU_QUERIES_ENABLED = Setting.boolSetting(
+ TOP_N_CPU_QUERIES_PREFIX + ".enabled",
+ false,
+ Setting.Property.Dynamic,
+ Setting.Property.NodeScope
+ );
+
+ /**
+ * Int setting to define the top n size for top queries by cpu.
+ */
+ public static final Setting<Integer> TOP_N_CPU_QUERIES_SIZE = Setting.intSetting(
+ TOP_N_CPU_QUERIES_PREFIX + ".top_n_size",
+ DEFAULT_TOP_N_SIZE,
+ Setting.Property.Dynamic,
+ Setting.Property.NodeScope
+ );
+
+ /**
+ * Time setting to define the window size in seconds for top queries by cpu.
+ */
+ public static final Setting<TimeValue> TOP_N_CPU_QUERIES_WINDOW_SIZE = Setting.positiveTimeSetting(
+ TOP_N_CPU_QUERIES_PREFIX + ".window_size",
+ DEFAULT_WINDOW_SIZE,
+ Setting.Property.NodeScope,
+ Setting.Property.Dynamic
+ );
+
+ /**
+ * Boolean setting for enabling top queries by memory.
+ */
+ public static final Setting<Boolean> TOP_N_MEMORY_QUERIES_ENABLED = Setting.boolSetting(
+ TOP_N_MEMORY_QUERIES_PREFIX + ".enabled",
+ false,
+ Setting.Property.Dynamic,
+ Setting.Property.NodeScope
+ );
+
+ /**
+ * Int setting to define the top n size for top queries by memory.
+ */
+ public static final Setting<Integer> TOP_N_MEMORY_QUERIES_SIZE = Setting.intSetting(
+ TOP_N_MEMORY_QUERIES_PREFIX + ".top_n_size",
+ DEFAULT_TOP_N_SIZE,
+ Setting.Property.Dynamic,
+ Setting.Property.NodeScope
+ );
+
+ /**
+ * Time setting to define the window size in seconds for top queries by memory.
+ */
+ public static final Setting<TimeValue> TOP_N_MEMORY_QUERIES_WINDOW_SIZE = Setting.positiveTimeSetting(
+ TOP_N_MEMORY_QUERIES_PREFIX + ".window_size",
+ DEFAULT_WINDOW_SIZE,
+ Setting.Property.NodeScope,
+ Setting.Property.Dynamic
+ );
+
/**
* Config key for exporter type
*/
@@ -125,9 +190,17 @@ public class QueryInsightsSettings {
*/
private static final String TOP_N_LATENCY_QUERIES_EXPORTER_PREFIX = TOP_N_LATENCY_QUERIES_PREFIX + ".exporter.";
/**
- * Default index pattern of top n queries by latency
+ * Prefix for top n queries by cpu exporters
+ */
+ private static final String TOP_N_CPU_QUERIES_EXPORTER_PREFIX = TOP_N_CPU_QUERIES_PREFIX + ".exporter.";
+ /**
+ * Prefix for top n queries by memory exporters
*/
- public static final String DEFAULT_TOP_N_LATENCY_QUERIES_INDEX_PATTERN = "'top_queries_by_latency-'YYYY.MM.dd";
+ private static final String TOP_N_MEMORY_QUERIES_EXPORTER_PREFIX = TOP_N_MEMORY_QUERIES_PREFIX + ".exporter.";
+ /**
+ * Default index pattern of top n queries
+ */
+ public static final String DEFAULT_TOP_N_QUERIES_INDEX_PATTERN = "'top_queries-'YYYY.MM.dd";
/**
* Default exporter type of top queries
*/
@@ -142,6 +215,88 @@ public class QueryInsightsSettings {
Setting.Property.NodeScope
);
+ /**
+ * Settings for the exporter of top cpu queries
+ */
+ public static final Setting<Settings> TOP_N_CPU_EXPORTER_SETTINGS = Setting.groupSetting(
+ TOP_N_CPU_QUERIES_EXPORTER_PREFIX,
+ Setting.Property.Dynamic,
+ Setting.Property.NodeScope
+ );
+
+ /**
+ * Settings for the exporter of top memory queries
+ */
+ public static final Setting<Settings> TOP_N_MEMORY_EXPORTER_SETTINGS = Setting.groupSetting(
+ TOP_N_MEMORY_QUERIES_EXPORTER_PREFIX,
+ Setting.Property.Dynamic,
+ Setting.Property.NodeScope
+ );
+
+ /**
+ * Get the enabled setting based on type
+ * @param type MetricType
+ * @return enabled setting
+ */
+ public static Setting<Boolean> getTopNEnabledSetting(MetricType type) {
+ switch (type) {
+ case CPU:
+ return TOP_N_CPU_QUERIES_ENABLED;
+ case MEMORY:
+ return TOP_N_MEMORY_QUERIES_ENABLED;
+ default:
+ return TOP_N_LATENCY_QUERIES_ENABLED;
+ }
+ }
+
+ /**
+ * Get the top n size setting based on type
+ * @param type MetricType
+ * @return top n size setting
+ */
+ public static Setting<Integer> getTopNSizeSetting(MetricType type) {
+ switch (type) {
+ case CPU:
+ return TOP_N_CPU_QUERIES_SIZE;
+ case MEMORY:
+ return TOP_N_MEMORY_QUERIES_SIZE;
+ default:
+ return TOP_N_LATENCY_QUERIES_SIZE;
+ }
+ }
+
+ /**
+ * Get the window size setting based on type
+ * @param type MetricType
+ * @return top n queries window size setting
+ */
+ public static Setting<TimeValue> getTopNWindowSizeSetting(MetricType type) {
+ switch (type) {
+ case CPU:
+ return TOP_N_CPU_QUERIES_WINDOW_SIZE;
+ case MEMORY:
+ return TOP_N_MEMORY_QUERIES_WINDOW_SIZE;
+ default:
+ return TOP_N_LATENCY_QUERIES_WINDOW_SIZE;
+ }
+ }
+
+ /**
+ * Get the exporter settings based on type
+ * @param type MetricType
+ * @return exporter setting
+ */
+ public static Setting<Settings> getExporterSettings(MetricType type) {
+ switch (type) {
+ case CPU:
+ return TOP_N_CPU_EXPORTER_SETTINGS;
+ case MEMORY:
+ return TOP_N_MEMORY_EXPORTER_SETTINGS;
+ default:
+ return TOP_N_LATENCY_EXPORTER_SETTINGS;
+ }
+ }
+
/**
* Default constructor
*/
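The four accessors turn the per-metric settings into a type-keyed lookup, which is what lets the listener and service iterate MetricType.allMetricTypes() instead of hard-coding latency. A hedged sketch of the setting keys they resolve to, derived from the prefixes defined above:

    // Keys shown in comments are derived from TOP_N_QUERIES_SETTING_PREFIX;
    // for illustration only.
    static void collectTopQueriesSettingKeys() {
        for (MetricType type : MetricType.allMetricTypes()) {
            // e.g. "search.insights.top_queries.cpu.enabled"
            String enabledKey = QueryInsightsSettings.getTopNEnabledSetting(type).getKey();
            // e.g. "search.insights.top_queries.memory.top_n_size"
            String sizeKey = QueryInsightsSettings.getTopNSizeSetting(type).getKey();
        }
    }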
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java
index 8b8856e3e305c..2efe9085a39ee 100644
--- a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java
@@ -47,11 +47,7 @@ public void setup() {
Settings.Builder settingsBuilder = Settings.builder();
Settings settings = settingsBuilder.build();
ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS);
-
+ QueryInsightsTestUtils.registerAllQueryInsightsSettings(clusterSettings);
clusterService = ClusterServiceUtils.createClusterService(settings, clusterSettings, threadPool);
}
@@ -61,7 +57,15 @@ public void testGetSettings() {
QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED,
QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE,
QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE,
- QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS
+ QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS,
+ QueryInsightsSettings.TOP_N_CPU_QUERIES_ENABLED,
+ QueryInsightsSettings.TOP_N_CPU_QUERIES_SIZE,
+ QueryInsightsSettings.TOP_N_CPU_QUERIES_WINDOW_SIZE,
+ QueryInsightsSettings.TOP_N_CPU_EXPORTER_SETTINGS,
+ QueryInsightsSettings.TOP_N_MEMORY_QUERIES_ENABLED,
+ QueryInsightsSettings.TOP_N_MEMORY_QUERIES_SIZE,
+ QueryInsightsSettings.TOP_N_MEMORY_QUERIES_WINDOW_SIZE,
+ QueryInsightsSettings.TOP_N_MEMORY_EXPORTER_SETTINGS
),
queryInsightsPlugin.getSettings()
);
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java
index 870ef5b9c8be9..7fa4e9841c20e 100644
--- a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java
@@ -10,6 +10,7 @@
import org.opensearch.action.search.SearchType;
import org.opensearch.cluster.node.DiscoveryNode;
+import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.util.Maps;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -17,6 +18,7 @@
import org.opensearch.plugin.insights.rules.model.Attribute;
import org.opensearch.plugin.insights.rules.model.MetricType;
import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
import org.opensearch.test.VersionUtils;
import java.io.IOException;
@@ -36,7 +38,6 @@
import static org.opensearch.test.OpenSearchTestCase.random;
import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLengthBetween;
import static org.opensearch.test.OpenSearchTestCase.randomArray;
-import static org.opensearch.test.OpenSearchTestCase.randomDouble;
import static org.opensearch.test.OpenSearchTestCase.randomIntBetween;
import static org.opensearch.test.OpenSearchTestCase.randomLong;
import static org.opensearch.test.OpenSearchTestCase.randomLongBetween;
@@ -63,9 +64,9 @@ public static List<SearchQueryRecord> generateQueryInsightRecords(int lower, int
MetricType.LATENCY,
randomLongBetween(1000, 10000),
MetricType.CPU,
- randomDouble(),
- MetricType.JVM,
- randomDouble()
+ randomLongBetween(1000, 10000),
+ MetricType.MEMORY,
+ randomLongBetween(1000, 10000)
);
Map<String, Long> phaseLatencyMap = new HashMap<>();
@@ -186,4 +187,19 @@ public static boolean checkRecordsEqualsWithoutOrder(
}
return true;
}
+
+ public static void registerAllQueryInsightsSettings(ClusterSettings clusterSettings) {
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_CPU_QUERIES_ENABLED);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_CPU_QUERIES_SIZE);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_CPU_QUERIES_WINDOW_SIZE);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_CPU_EXPORTER_SETTINGS);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_MEMORY_QUERIES_ENABLED);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_MEMORY_QUERIES_SIZE);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_MEMORY_QUERIES_WINDOW_SIZE);
+ clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_MEMORY_EXPORTER_SETTINGS);
+ }
}
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java
index 328ed0cd2ed15..86de44c680188 100644
--- a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java
@@ -11,27 +11,44 @@
import org.opensearch.action.search.SearchPhaseContext;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchRequestContext;
+import org.opensearch.action.search.SearchTask;
import org.opensearch.action.search.SearchType;
+import org.opensearch.action.support.replication.ClusterStateCreationUtils;
+import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.service.ClusterService;
+import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.concurrent.ThreadContext;
+import org.opensearch.common.util.io.IOUtils;
+import org.opensearch.core.tasks.TaskId;
+import org.opensearch.plugin.insights.QueryInsightsTestUtils;
import org.opensearch.plugin.insights.core.service.QueryInsightsService;
import org.opensearch.plugin.insights.core.service.TopQueriesService;
+import org.opensearch.plugin.insights.rules.model.Attribute;
import org.opensearch.plugin.insights.rules.model.MetricType;
-import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.opensearch.search.aggregations.support.ValueType;
import org.opensearch.search.builder.SearchSourceBuilder;
+import org.opensearch.tasks.Task;
import org.opensearch.test.ClusterServiceUtils;
import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.threadpool.TestThreadPool;
+import org.opensearch.threadpool.ThreadPool;
import org.junit.Before;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Phaser;
+import java.util.concurrent.TimeUnit;
+
+import org.mockito.ArgumentCaptor;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
@@ -48,6 +65,7 @@ public class QueryInsightsListenerTests extends OpenSearchTestCase {
private final SearchRequest searchRequest = mock(SearchRequest.class);
private final QueryInsightsService queryInsightsService = mock(QueryInsightsService.class);
private final TopQueriesService topQueriesService = mock(TopQueriesService.class);
+ private final ThreadPool threadPool = new TestThreadPool("QueryInsightsThreadPool");
private ClusterService clusterService;
@Before
@@ -55,14 +73,25 @@ public void setup() {
Settings.Builder settingsBuilder = Settings.builder();
Settings settings = settingsBuilder.build();
ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE);
- clusterService = ClusterServiceUtils.createClusterService(settings, clusterSettings, null);
+ QueryInsightsTestUtils.registerAllQueryInsightsSettings(clusterSettings);
+ ClusterState state = ClusterStateCreationUtils.stateWithActivePrimary("test", true, 1 + randomInt(3), randomInt(2));
+ clusterService = ClusterServiceUtils.createClusterService(threadPool, state.getNodes().getLocalNode(), clusterSettings);
+ ClusterServiceUtils.setState(clusterService, state);
when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(true);
when(queryInsightsService.getTopQueriesService(MetricType.LATENCY)).thenReturn(topQueriesService);
+
+ ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
+ threadPool.getThreadContext().setHeaders(new Tuple<>(Collections.singletonMap(Task.X_OPAQUE_ID, "userLabel"), new HashMap<>()));
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+ IOUtils.close(clusterService);
+ ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
+ @SuppressWarnings("unchecked")
public void testOnRequestEnd() throws InterruptedException {
Long timestamp = System.currentTimeMillis() - 100L;
SearchType searchType = SearchType.QUERY_THEN_FETCH;
@@ -70,6 +99,14 @@ public void testOnRequestEnd() throws InterruptedException {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1").userValueTypeHint(ValueType.STRING).field("type.keyword"));
searchSourceBuilder.size(0);
+ SearchTask task = new SearchTask(
+ 0,
+ "n/a",
+ "n/a",
+ () -> "test",
+ TaskId.EMPTY_TASK_ID,
+ Collections.singletonMap(Task.X_OPAQUE_ID, "userLabel")
+ );
String[] indices = new String[] { "index-1", "index-2" };
@@ -89,10 +126,19 @@ public void testOnRequestEnd() throws InterruptedException {
when(searchRequestContext.phaseTookMap()).thenReturn(phaseLatencyMap);
when(searchPhaseContext.getRequest()).thenReturn(searchRequest);
when(searchPhaseContext.getNumShards()).thenReturn(numberOfShards);
+ when(searchPhaseContext.getTask()).thenReturn(task);
+ ArgumentCaptor<SearchQueryRecord> captor = ArgumentCaptor.forClass(SearchQueryRecord.class);
queryInsightsListener.onRequestEnd(searchPhaseContext, searchRequestContext);
- verify(queryInsightsService, times(1)).addRecord(any());
+ verify(queryInsightsService, times(1)).addRecord(captor.capture());
+ SearchQueryRecord generatedRecord = captor.getValue();
+ assertEquals(timestamp.longValue(), generatedRecord.getTimestamp());
+ assertEquals(numberOfShards, generatedRecord.getAttributes().get(Attribute.TOTAL_SHARDS));
+ assertEquals(searchType.toString().toLowerCase(Locale.ROOT), generatedRecord.getAttributes().get(Attribute.SEARCH_TYPE));
+ assertEquals(searchSourceBuilder.toString(), generatedRecord.getAttributes().get(Attribute.SOURCE));
+ Map<String, Object> labels = (Map<String, Object>) generatedRecord.getAttributes().get(Attribute.LABELS);
+ assertEquals("userLabel", labels.get(Task.X_OPAQUE_ID));
}
public void testConcurrentOnRequestEnd() throws InterruptedException {
@@ -102,6 +148,14 @@ public void testConcurrentOnRequestEnd() throws InterruptedException {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1").userValueTypeHint(ValueType.STRING).field("type.keyword"));
searchSourceBuilder.size(0);
+ SearchTask task = new SearchTask(
+ 0,
+ "n/a",
+ "n/a",
+ () -> "test",
+ TaskId.EMPTY_TASK_ID,
+ Collections.singletonMap(Task.X_OPAQUE_ID, "userLabel")
+ );
String[] indices = new String[] { "index-1", "index-2" };
@@ -121,6 +175,7 @@ public void testConcurrentOnRequestEnd() throws InterruptedException {
when(searchRequestContext.phaseTookMap()).thenReturn(phaseLatencyMap);
when(searchPhaseContext.getRequest()).thenReturn(searchRequest);
when(searchPhaseContext.getNumShards()).thenReturn(numberOfShards);
+ when(searchPhaseContext.getTask()).thenReturn(task);
int numRequests = 50;
Thread[] threads = new Thread[numRequests];
@@ -155,7 +210,7 @@ public void testSetEnabled() {
when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(false);
when(queryInsightsService.isCollectionEnabled(MetricType.CPU)).thenReturn(false);
- when(queryInsightsService.isCollectionEnabled(MetricType.JVM)).thenReturn(false);
+ when(queryInsightsService.isCollectionEnabled(MetricType.MEMORY)).thenReturn(false);
queryInsightsListener.setEnableTopQueries(MetricType.LATENCY, false);
assertFalse(queryInsightsListener.isEnabled());
}
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java
index 428f615ce2f90..75a5768f50681 100644
--- a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java
@@ -34,11 +34,11 @@ public void setup() {
Settings.Builder settingsBuilder = Settings.builder();
Settings settings = settingsBuilder.build();
ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
- clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_EXPORTER_SETTINGS);
+ QueryInsightsTestUtils.registerAllQueryInsightsSettings(clusterSettings);
queryInsightsService = new QueryInsightsService(clusterSettings, threadPool, client);
queryInsightsService.enableCollection(MetricType.LATENCY, true);
queryInsightsService.enableCollection(MetricType.CPU, true);
- queryInsightsService.enableCollection(MetricType.JVM, true);
+ queryInsightsService.enableCollection(MetricType.MEMORY, true);
}
public void testAddRecordToLimitAndDrain() {
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java
index 793d5878e2300..ad45b53ec5363 100644
--- a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java
@@ -39,7 +39,7 @@ public void testSerializationAndEquals() throws Exception {
public void testAllMetricTypes() {
Set<MetricType> allMetrics = MetricType.allMetricTypes();
- Set<MetricType> expected = new HashSet<>(Arrays.asList(MetricType.LATENCY, MetricType.CPU, MetricType.JVM));
+ Set<MetricType> expected = new HashSet<>(Arrays.asList(MetricType.LATENCY, MetricType.CPU, MetricType.MEMORY));
assertEquals(expected, allMetrics);
}
diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle
index ff62c328c7e74..61e9f71712eaf 100644
--- a/plugins/repository-azure/build.gradle
+++ b/plugins/repository-azure/build.gradle
@@ -44,10 +44,11 @@ opensearchplugin {
}
dependencies {
- api 'com.azure:azure-core:1.47.0'
+ api 'com.azure:azure-core:1.49.1'
api 'com.azure:azure-json:1.1.0'
+ api 'com.azure:azure-xml:1.0.0'
api 'com.azure:azure-storage-common:12.21.2'
- api 'com.azure:azure-core-http-netty:1.12.8'
+ api 'com.azure:azure-core-http-netty:1.15.1'
api "io.netty:netty-codec-dns:${versions.netty}"
api "io.netty:netty-codec-socks:${versions.netty}"
api "io.netty:netty-codec-http2:${versions.netty}"
diff --git a/plugins/repository-azure/licenses/azure-core-1.47.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.47.0.jar.sha1
deleted file mode 100644
index 42e35aacc63b1..0000000000000
--- a/plugins/repository-azure/licenses/azure-core-1.47.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6b300175826f0bb0916fca2fa5f70885b716e93f
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-core-1.49.1.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.49.1.jar.sha1
new file mode 100644
index 0000000000000..d487c08c26e94
--- /dev/null
+++ b/plugins/repository-azure/licenses/azure-core-1.49.1.jar.sha1
@@ -0,0 +1 @@
+a7c44282eaa0f5a3be4b920d6a057509adfe8674
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.12.8.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.12.8.jar.sha1
deleted file mode 100644
index e6ee1dec64641..0000000000000
--- a/plugins/repository-azure/licenses/azure-core-http-netty-1.12.8.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-511ed2d02afb0f43f029df3d10ff80d2d3539f05
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.15.1.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.1.jar.sha1
new file mode 100644
index 0000000000000..3a0747a0daacb
--- /dev/null
+++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.1.jar.sha1
@@ -0,0 +1 @@
+036f7466a521aa99c79a491a9cf20444667df78b
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-xml-1.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-xml-1.0.0.jar.sha1
new file mode 100644
index 0000000000000..798ec5d95c6ac
--- /dev/null
+++ b/plugins/repository-azure/licenses/azure-xml-1.0.0.jar.sha1
@@ -0,0 +1 @@
+ba584703bd47e9e789343ee3332f0f5a64f7f187
\ No newline at end of file
diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java
index 90143d907cd99..b0582624e21d5 100644
--- a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java
+++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java
@@ -23,10 +23,13 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
+import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData;
@@ -147,6 +150,36 @@ public void testGauge() throws Exception {
}
+ public void testGaugeWithValueAndTagSupplier() throws Exception {
+ String metricName = "test-gauge";
+ MetricsRegistry metricsRegistry = internalCluster().getInstance(MetricsRegistry.class);
+ InMemorySingletonMetricsExporter.INSTANCE.reset();
+ Tags tags = Tags.create().addTag("test", "integ-test");
+ final AtomicInteger testValue = new AtomicInteger(0);
+ Supplier<TaggedMeasurement> valueProvider = () -> {
+ return TaggedMeasurement.create(Double.valueOf(testValue.incrementAndGet()), tags);
+ };
+ Closeable gaugeCloseable = metricsRegistry.createGauge(metricName, "test", "ms", valueProvider);
+ // Sleep for about 2.2s to wait for metrics to be published.
+ Thread.sleep(2200);
+
+ InMemorySingletonMetricsExporter exporter = InMemorySingletonMetricsExporter.INSTANCE;
+
+ assertTrue(getMaxObservableGaugeValue(exporter, metricName) >= 2.0);
+
+ gaugeCloseable.close();
+ double observableGaugeValueAfterStop = getMaxObservableGaugeValue(exporter, metricName);
+
+ Map<AttributeKey<?>, Object> attributes = getMetricAttributes(exporter, metricName);
+
+ assertEquals("integ-test", attributes.get(AttributeKey.stringKey("test")));
+
+ // Sleep for about 1.2s to verify that the closed observableGauge no longer executes its callback.
+ Thread.sleep(1200);
+ assertEquals(observableGaugeValueAfterStop, getMaxObservableGaugeValue(exporter, metricName), 0.0);
+
+ }
+
private static double getMaxObservableGaugeValue(InMemorySingletonMetricsExporter exporter, String metricName) {
List<MetricData> dataPoints = exporter.getFinishedMetricItems()
.stream()
@@ -159,6 +192,15 @@ private static double getMaxObservableGaugeValue(InMemorySingletonMetricsExporte
return totalValue;
}
+ private static Map<AttributeKey<?>, Object> getMetricAttributes(InMemorySingletonMetricsExporter exporter, String metricName) {
+ List<MetricData> dataPoints = exporter.getFinishedMetricItems()
+ .stream()
+ .filter(a -> a.getName().contains(metricName))
+ .collect(Collectors.toList());
+ Attributes attributes = dataPoints.get(0).getDoubleGaugeData().getPoints().stream().findAny().get().getAttributes();
+ return attributes.asMap();
+ }
+
@After
public void reset() {
InMemorySingletonMetricsExporter.INSTANCE.reset();
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
index 6fe08040d7af5..3258e91738ba6 100644
--- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
@@ -101,6 +101,17 @@ public Closeable createGauge(String name, String description, String unit, Suppl
return () -> doubleObservableGauge.close();
}
+ @Override
+ public Closeable createGauge(String name, String description, String unit, Supplier<TaggedMeasurement> value) {
+ ObservableDoubleGauge doubleObservableGauge = AccessController.doPrivileged(
+ (PrivilegedAction<ObservableDoubleGauge>) () -> otelMeter.gaugeBuilder(name)
+ .setUnit(unit)
+ .setDescription(description)
+ .buildWithCallback(record -> record.record(value.get().getValue(), OTelAttributesConverter.convert(value.get().getTags())))
+ );
+ return () -> doubleObservableGauge.close();
+ }
+
@Override
public void close() throws IOException {
meterProvider.close();
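The new createGauge overload re-evaluates both the value and its tags on every collection cycle, converting the tags through OTelAttributesConverter at record time. A hedged usage sketch modeled on the integration test above (metric name and tag are illustrative):

    // Registers an observable gauge whose value/tags come from the supplier
    // on each metrics export; closing the returned Closeable stops callbacks.
    Tags tags = Tags.create().addTag("component", "example");
    Closeable gauge = metricsRegistry.createGauge(
        "example.gauge",        // metric name
        "example description",  // description
        "ms",                   // unit
        () -> TaggedMeasurement.create(42.0, tags)
    );
    gauge.close(); // the supplier is no longer invoked after this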
diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java
index 2e89a3c488d5c..794cafc1fb608 100644
--- a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java
+++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java
@@ -180,4 +180,34 @@ public void testGauge() throws Exception {
closeable.close();
verify(observableDoubleGauge).close();
}
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ public void testGaugeWithValueAndTagsSupplier() throws Exception {
+ String observableGaugeName = "test-gauge";
+ String description = "test";
+ String unit = "1";
+ Meter mockMeter = mock(Meter.class);
+ OpenTelemetry mockOpenTelemetry = mock(OpenTelemetry.class);
+ ObservableDoubleGauge observableDoubleGauge = mock(ObservableDoubleGauge.class);
+ DoubleGaugeBuilder mockOTelDoubleGaugeBuilder = mock(DoubleGaugeBuilder.class);
+ MeterProvider meterProvider = mock(MeterProvider.class);
+ when(meterProvider.get(OTelTelemetryPlugin.INSTRUMENTATION_SCOPE_NAME)).thenReturn(mockMeter);
+ MetricsTelemetry metricsTelemetry = new OTelMetricsTelemetry(
+ new RefCountedReleasable<>("telemetry", mockOpenTelemetry, () -> {}),
+ meterProvider
+ );
+ when(mockMeter.gaugeBuilder(Mockito.contains(observableGaugeName))).thenReturn(mockOTelDoubleGaugeBuilder);
+ when(mockOTelDoubleGaugeBuilder.setDescription(description)).thenReturn(mockOTelDoubleGaugeBuilder);
+ when(mockOTelDoubleGaugeBuilder.setUnit(unit)).thenReturn(mockOTelDoubleGaugeBuilder);
+ when(mockOTelDoubleGaugeBuilder.buildWithCallback(any(Consumer.class))).thenReturn(observableDoubleGauge);
+
+ Closeable closeable = metricsTelemetry.createGauge(
+ observableGaugeName,
+ description,
+ unit,
+ () -> TaggedMeasurement.create(1.0, Tags.EMPTY)
+ );
+ closeable.close();
+ verify(observableDoubleGauge).close();
+ }
}
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpChunk.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpChunk.java
new file mode 100644
index 0000000000000..3b4a308691e7b
--- /dev/null
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpChunk.java
@@ -0,0 +1,49 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.http.reactor.netty4;
+
+import org.opensearch.core.common.bytes.BytesReference;
+import org.opensearch.http.HttpChunk;
+import org.opensearch.transport.reactor.netty4.Netty4Utils;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import io.netty.buffer.ByteBuf;
+
+class ReactorNetty4HttpChunk implements HttpChunk {
+ private final AtomicBoolean released;
+ private final boolean pooled;
+ private final ByteBuf content;
+ private final boolean last;
+
+ ReactorNetty4HttpChunk(ByteBuf content, boolean last) {
+ this.content = content;
+ this.pooled = true;
+ this.released = new AtomicBoolean(false);
+ this.last = last;
+ }
+
+ @Override
+ public BytesReference content() {
+ assert released.get() == false;
+ return Netty4Utils.toBytesReference(content);
+ }
+
+ @Override
+ public void close() {
+ if (pooled && released.compareAndSet(false, true)) {
+ content.release();
+ }
+ }
+
+ @Override
+ public boolean isLast() {
+ return last;
+ }
+}
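ReactorNetty4HttpChunk wraps a pooled Netty ByteBuf, so release must happen exactly once; the AtomicBoolean plus compareAndSet makes close() idempotent. A minimal consumption sketch under that contract:

    // content() is only valid before close(); a second close() is a safe
    // no-op because the compareAndSet guard prevents double-releasing the
    // underlying ByteBuf.
    static BytesReference readAndRelease(HttpChunk chunk) {
        BytesReference body = chunk.content();
        chunk.close();
        return body;
    }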
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpRequest.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpRequest.java
index 4406c555a5b04..491c7aa885103 100644
--- a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpRequest.java
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpRequest.java
@@ -44,6 +44,10 @@ class ReactorNetty4HttpRequest implements HttpRequest {
private final Exception inboundException;
private final boolean pooled;
+ ReactorNetty4HttpRequest(HttpServerRequest request) {
+ this(request, new HttpHeadersMap(request.requestHeaders()), new AtomicBoolean(false), false, Unpooled.EMPTY_BUFFER);
+ }
+
ReactorNetty4HttpRequest(HttpServerRequest request, ByteBuf content) {
this(request, new HttpHeadersMap(request.requestHeaders()), new AtomicBoolean(false), true, content);
}
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpServerTransport.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpServerTransport.java
index bd1646d753016..906bbfd072da8 100644
--- a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpServerTransport.java
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpServerTransport.java
@@ -26,6 +26,8 @@
import org.opensearch.http.HttpServerChannel;
import org.opensearch.http.reactor.netty4.ssl.SslUtils;
import org.opensearch.plugins.SecureHttpTransportSettingsProvider;
+import org.opensearch.rest.RestHandler;
+import org.opensearch.rest.RestRequest.Method;
import org.opensearch.telemetry.tracing.Tracer;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.reactor.SharedGroupFactory;
@@ -40,6 +42,7 @@
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
+import java.util.Optional;
import io.netty.buffer.ByteBufAllocator;
import io.netty.channel.ChannelOption;
@@ -351,24 +354,45 @@ public List<String> protocols() {
* @return response publisher
*/
protected Publisher<Void> incomingRequest(HttpServerRequest request, HttpServerResponse response) {
- final NonStreamingRequestConsumer<HttpContent> consumer = new NonStreamingRequestConsumer<>(
- this,
- request,
- response,
- maxCompositeBufferComponents
+ final Method method = HttpConversionUtil.convertMethod(request.method());
+ final Optional<RestHandler> dispatchHandlerOpt = dispatcher.dispatchHandler(
+ request.uri(),
+ request.fullPath(),
+ method,
+ request.params()
);
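+ // If the matched handler supports streaming, feed the request body through chunk by chunk;
+ // otherwise fall back to aggregating the full body before dispatching, as before.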
+ if (dispatchHandlerOpt.map(RestHandler::supportsStreaming).orElse(false)) {
+ final ReactorNetty4StreamingRequestConsumer<HttpContent> consumer = new ReactorNetty4StreamingRequestConsumer<>(
+ request,
+ response
+ );
+
+ request.receiveContent()
+ .switchIfEmpty(Mono.just(DefaultLastHttpContent.EMPTY_LAST_CONTENT))
+ .subscribe(consumer, error -> {}, () -> consumer.accept(DefaultLastHttpContent.EMPTY_LAST_CONTENT));
+
+ incomingStream(new ReactorNetty4HttpRequest(request), consumer.httpChannel());
+ return response.sendObject(consumer);
+ } else {
+ final ReactorNetty4NonStreamingRequestConsumer<HttpContent> consumer = new ReactorNetty4NonStreamingRequestConsumer<>(
+ this,
+ request,
+ response,
+ maxCompositeBufferComponents
+ );
- request.receiveContent().switchIfEmpty(Mono.just(DefaultLastHttpContent.EMPTY_LAST_CONTENT)).subscribe(consumer);
-
- return Mono.from(consumer).flatMap(hc -> {
- final FullHttpResponse r = (FullHttpResponse) hc;
- response.status(r.status());
- response.trailerHeaders(c -> r.trailingHeaders().forEach(h -> c.add(h.getKey(), h.getValue())));
- response.chunkedTransfer(false);
- response.compression(true);
- r.headers().forEach(h -> response.addHeader(h.getKey(), h.getValue()));
- return Mono.from(response.sendObject(r.content()));
- });
+ request.receiveContent().switchIfEmpty(Mono.just(DefaultLastHttpContent.EMPTY_LAST_CONTENT)).subscribe(consumer);
+
+ return Mono.from(consumer).flatMap(hc -> {
+ final FullHttpResponse r = (FullHttpResponse) hc;
+ response.status(r.status());
+ response.trailerHeaders(c -> r.trailingHeaders().forEach(h -> c.add(h.getKey(), h.getValue())));
+ response.chunkedTransfer(false);
+ response.compression(true);
+ r.headers().forEach(h -> response.addHeader(h.getKey(), h.getValue()));
+ return Mono.from(response.sendObject(r.content()));
+ });
+ }
}
/**
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/NonStreamingHttpChannel.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4NonStreamingHttpChannel.java
similarity index 92%
rename from plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/NonStreamingHttpChannel.java
rename to plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4NonStreamingHttpChannel.java
index 98b359319ff1b..7df0b3c0c35fe 100644
--- a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/NonStreamingHttpChannel.java
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4NonStreamingHttpChannel.java
@@ -23,13 +23,13 @@
import reactor.netty.http.server.HttpServerRequest;
import reactor.netty.http.server.HttpServerResponse;
-class NonStreamingHttpChannel implements HttpChannel {
+class ReactorNetty4NonStreamingHttpChannel implements HttpChannel {
private final HttpServerRequest request;
private final HttpServerResponse response;
private final CompletableContext<Void> closeContext = new CompletableContext<>();
private final FluxSink<HttpContent> emitter;
- NonStreamingHttpChannel(HttpServerRequest request, HttpServerResponse response, FluxSink<HttpContent> emitter) {
+ ReactorNetty4NonStreamingHttpChannel(HttpServerRequest request, HttpServerResponse response, FluxSink<HttpContent> emitter) {
this.request = request;
this.response = response;
this.emitter = emitter;
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/NonStreamingRequestConsumer.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4NonStreamingRequestConsumer.java
similarity index 89%
rename from plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/NonStreamingRequestConsumer.java
rename to plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4NonStreamingRequestConsumer.java
index d43e23e800e65..c09e7755b1670 100644
--- a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/NonStreamingRequestConsumer.java
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4NonStreamingRequestConsumer.java
@@ -25,7 +25,7 @@
import reactor.netty.http.server.HttpServerRequest;
import reactor.netty.http.server.HttpServerResponse;
-class NonStreamingRequestConsumer<T extends HttpContent> implements Consumer<T>, Publisher<HttpContent>, Disposable {
+class ReactorNetty4NonStreamingRequestConsumer<T extends HttpContent> implements Consumer<T>, Publisher<HttpContent>, Disposable {
private final HttpServerRequest request;
private final HttpServerResponse response;
private final CompositeByteBuf content;
@@ -34,7 +34,7 @@ class NonStreamingRequestConsumer<T extends HttpContent> implements Consumer<T>,
private final AtomicBoolean disposed = new AtomicBoolean(false);
private volatile FluxSink<HttpContent> emitter;
- NonStreamingRequestConsumer(
+ ReactorNetty4NonStreamingRequestConsumer(
AbstractHttpServerTransport transport,
HttpServerRequest request,
HttpServerResponse response,
@@ -64,12 +64,12 @@ public void accept(T message) {
}
}
- public void process(HttpContent in, FluxSink<HttpContent> emitter) {
+ void process(HttpContent in, FluxSink<HttpContent> emitter) {
// Consume request body in full before dispatching it
content.addComponent(true, in.content().retain());
if (in instanceof LastHttpContent) {
- final NonStreamingHttpChannel channel = new NonStreamingHttpChannel(request, response, emitter);
+ final ReactorNetty4NonStreamingHttpChannel channel = new ReactorNetty4NonStreamingHttpChannel(request, response, emitter);
final HttpRequest r = createRequest(request, content);
try {
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingHttpChannel.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingHttpChannel.java
new file mode 100644
index 0000000000000..56dadea0477c5
--- /dev/null
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingHttpChannel.java
@@ -0,0 +1,132 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.http.reactor.netty4;
+
+import org.opensearch.common.concurrent.CompletableContext;
+import org.opensearch.core.action.ActionListener;
+import org.opensearch.core.common.bytes.BytesReference;
+import org.opensearch.http.HttpChunk;
+import org.opensearch.http.HttpResponse;
+import org.opensearch.http.StreamingHttpChannel;
+import org.opensearch.transport.reactor.netty4.Netty4Utils;
+
+import java.net.InetSocketAddress;
+import java.util.List;
+import java.util.Map;
+
+import io.netty.buffer.Unpooled;
+import io.netty.handler.codec.http.DefaultHttpContent;
+import io.netty.handler.codec.http.FullHttpResponse;
+import io.netty.handler.codec.http.HttpContent;
+import org.reactivestreams.Publisher;
+import org.reactivestreams.Subscriber;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.FluxSink;
+import reactor.netty.http.server.HttpServerRequest;
+import reactor.netty.http.server.HttpServerResponse;
+
+class ReactorNetty4StreamingHttpChannel implements StreamingHttpChannel {
+ private final HttpServerRequest request;
+ private final HttpServerResponse response;
+ private final CompletableContext<Void> closeContext = new CompletableContext<>();
+ private final Publisher<HttpChunk> receiver;
+ private final StreamingHttpContentSender sender;
+ private volatile FluxSink<HttpChunk> producer;
+ private volatile boolean lastChunkReceived = false;
+
+ ReactorNetty4StreamingHttpChannel(HttpServerRequest request, HttpServerResponse response, StreamingHttpContentSender sender) {
+ this.request = request;
+ this.response = response;
+ this.sender = sender;
+ this.receiver = Flux.create(producer -> this.producer = producer);
+ this.request.withConnection(connection -> Netty4Utils.addListener(connection.channel().closeFuture(), closeContext));
+ }
+
+ @Override
+ public boolean isOpen() {
+ return true;
+ }
+
+ @Override
+ public void close() {
+ request.withConnection(connection -> connection.channel().close());
+ }
+
+ @Override
+ public void addCloseListener(ActionListener<Void> listener) {
+ closeContext.addListener(ActionListener.toBiConsumer(listener));
+ }
+
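+ // Outbound side: chunks and the final response are handed off to the StreamingHttpContentSender,
+ // which publishes them to the reactor-netty response.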
+ @Override
+ public void sendChunk(HttpChunk chunk, ActionListener<Void> listener) {
+ sender.send(createContent(chunk), listener, chunk.isLast());
+ }
+
+ @Override
+ public void sendResponse(HttpResponse response, ActionListener<Void> listener) {
+ sender.send(createContent(response), listener, true);
+ }
+
+ @Override
+ public void prepareResponse(int status, Map<String, List<String>> headers) {
+ this.response.status(status);
+ headers.forEach((k, vs) -> vs.forEach(v -> this.response.addHeader(k, v)));
+ }
+
+ @Override
+ public InetSocketAddress getRemoteAddress() {
+ return (InetSocketAddress) response.remoteAddress();
+ }
+
+ @Override
+ public InetSocketAddress getLocalAddress() {
+ return (InetSocketAddress) response.hostAddress();
+ }
+
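+ // Inbound side: forwards each received chunk to the downstream subscriber and completes the
+ // stream on the last chunk; the chunk is always closed afterwards, releasing its buffer.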
+ @Override
+ public void receiveChunk(HttpChunk message) {
+ try {
+ if (lastChunkReceived) {
+ return;
+ }
+
+ producer.next(message);
+ if (message.isLast()) {
+ lastChunkReceived = true;
+ producer.complete();
+ }
+ } finally {
+ message.close();
+ }
+ }
+
+ @Override
+ public boolean isReadable() {
+ return producer != null;
+ }
+
+ @Override
+ public boolean isWritable() {
+ return sender.isReady();
+ }
+
+ @Override
+ public void subscribe(Subscriber<? super HttpChunk> subscriber) {
+ receiver.subscribe(subscriber);
+ }
+
+ private static HttpContent createContent(HttpResponse response) {
+ final FullHttpResponse fullHttpResponse = (FullHttpResponse) response;
+ return new DefaultHttpContent(fullHttpResponse.content());
+ }
+
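+ // The chunk bytes are copied into an unpooled buffer so the outgoing content's lifetime is
+ // decoupled from the originating chunk, which the caller may release right after sendChunk.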
+ private static HttpContent createContent(HttpChunk chunk) {
+ return new DefaultHttpContent(Unpooled.copiedBuffer(BytesReference.toByteBuffers(chunk.content())));
+ }
+}
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingRequestConsumer.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingRequestConsumer.java
new file mode 100644
index 0000000000000..f34f54e561021
--- /dev/null
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingRequestConsumer.java
@@ -0,0 +1,53 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.http.reactor.netty4;
+
+import org.opensearch.http.HttpChunk;
+import org.opensearch.http.StreamingHttpChannel;
+
+import java.util.function.Consumer;
+
+import io.netty.handler.codec.http.HttpContent;
+import io.netty.handler.codec.http.LastHttpContent;
+import org.reactivestreams.Publisher;
+import org.reactivestreams.Subscriber;
+import reactor.netty.http.server.HttpServerRequest;
+import reactor.netty.http.server.HttpServerResponse;
+
+class ReactorNetty4StreamingRequestConsumer<T extends HttpContent> implements Consumer<T>, Publisher<HttpContent> {
+ private final ReactorNetty4StreamingResponseProducer sender;
+ private final StreamingHttpChannel httpChannel;
+
+ ReactorNetty4StreamingRequestConsumer(HttpServerRequest request, HttpServerResponse response) {
+ this.sender = new ReactorNetty4StreamingResponseProducer();
+ this.httpChannel = new ReactorNetty4StreamingHttpChannel(request, response, sender);
+ }
+
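+ // Translates inbound Netty HttpContent into HttpChunk instances, flagging the chunk built
+ // from LastHttpContent as the final one so the channel can complete the stream.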
+ @Override
+ public void accept(T message) {
+ if (message instanceof LastHttpContent) {
+ httpChannel.receiveChunk(createChunk(message, true));
+ } else if (message instanceof HttpContent) {
+ httpChannel.receiveChunk(createChunk(message, false));
+ }
+ }
+
+ @Override
+ public void subscribe(Subscriber<? super HttpContent> s) {
+ sender.subscribe(s);
+ }
+
+ HttpChunk createChunk(HttpContent chunk, boolean last) {
+ return new ReactorNetty4HttpChunk(chunk.content().retain(), last);
+ }
+
+ StreamingHttpChannel httpChannel() {
+ return httpChannel;
+ }
+}
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingResponseProducer.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingResponseProducer.java
new file mode 100644
index 0000000000000..616edccdfc396
--- /dev/null
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/ReactorNetty4StreamingResponseProducer.java
@@ -0,0 +1,54 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.http.reactor.netty4;
+
+import org.opensearch.core.action.ActionListener;
+
+import io.netty.handler.codec.http.HttpContent;
+import org.reactivestreams.Publisher;
+import org.reactivestreams.Subscriber;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.FluxSink;
+
+class ReactorNetty4StreamingResponseProducer implements StreamingHttpContentSender, Publisher<HttpContent> {
+ private final Publisher<HttpContent> sender;
+ private volatile FluxSink<HttpContent> emitter;
+
+ ReactorNetty4StreamingResponseProducer() {
+ this.sender = Flux.create(emitter -> this.emitter = emitter);
+ }
+
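+ // The emitter only becomes available once a downstream subscriber attaches to the Flux
+ // created above; isReady() reports exactly that before any content is sent.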
+ @Override
+ public void send(HttpContent content, ActionListener<Void> listener, boolean isLast) {
+ try {
+ emitter.next(content);
+ listener.onResponse(null);
+ if (isLast) {
+ emitter.complete();
+ }
+ } catch (final Exception ex) {
+ emitter.error(ex);
+ listener.onFailure(ex);
+ }
+ }
+
+ @Override
+ public void subscribe(Subscriber<? super HttpContent> s) {
+ sender.subscribe(s);
+ }
+
+ @Override
+ public boolean isReady() {
+ return emitter != null;
+ }
+
+ FluxSink<HttpContent> emitter() {
+ return emitter;
+ }
+}
diff --git a/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/StreamingHttpContentSender.java b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/StreamingHttpContentSender.java
new file mode 100644
index 0000000000000..f07d6fbb88349
--- /dev/null
+++ b/plugins/transport-reactor-netty4/src/main/java/org/opensearch/http/reactor/netty4/StreamingHttpContentSender.java
@@ -0,0 +1,32 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.http.reactor.netty4;
+
+import org.opensearch.core.action.ActionListener;
+
+import io.netty.handler.codec.http.HttpContent;
+
+/**
+ * The generic interface for chunked {@link HttpContent} producers (response streaming).
+ */
+interface StreamingHttpContentSender {
+ /**
+ * Sends the next {@link HttpContent} over the wire
+ * @param content next {@link HttpContent}
+ * @param listener action listener
+ * @param isLast {@code true} if this is the last chunk, {@code false} otherwise
+ */
+ void send(HttpContent content, ActionListener<Void> listener, boolean isLast);
+
+ /**
+ * Returns {@code true} if this channel is ready for streaming response data, {@code false} otherwise
+ * @return {@code true} if this channel is ready for streaming response data, {@code false} otherwise
+ */
+ boolean isReady();
+}
diff --git a/plugins/transport-reactor-netty4/src/test/java/org/opensearch/http/reactor/netty4/ReactorHttpClient.java b/plugins/transport-reactor-netty4/src/test/java/org/opensearch/http/reactor/netty4/ReactorHttpClient.java
index 920c895205023..0953e51484bd3 100644
--- a/plugins/transport-reactor-netty4/src/test/java/org/opensearch/http/reactor/netty4/ReactorHttpClient.java
+++ b/plugins/transport-reactor-netty4/src/test/java/org/opensearch/http/reactor/netty4/ReactorHttpClient.java
@@ -14,16 +14,22 @@
package org.opensearch.http.reactor.netty4;
import org.opensearch.common.collect.Tuple;
+import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.tasks.Task;
import org.opensearch.test.OpenSearchTestCase;
import java.io.Closeable;
+import java.io.IOException;
+import java.io.UncheckedIOException;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
+import java.util.stream.Stream;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
@@ -36,6 +42,7 @@
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpMethod;
+import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http2.HttpConversionUtil;
@@ -121,6 +128,11 @@ public final FullHttpResponse send(InetSocketAddress remoteAddress, FullHttpRequ
return responses.get(0);
}
+ public final FullHttpResponse stream(InetSocketAddress remoteAddress, HttpRequest httpRequest, Stream<ToXContent> stream)
+ throws InterruptedException {
+ return sendRequestStream(remoteAddress, httpRequest, stream);
+ }
+
public final FullHttpResponse send(InetSocketAddress remoteAddress, FullHttpRequest httpRequest, HttpContent content)
throws InterruptedException {
final List<FullHttpResponse> responses = sendRequests(
@@ -207,6 +219,46 @@ private List<FullHttpResponse> sendRequests(
}
}
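+ // Sends each ToXContent element as a separate JSON-encoded body part and aggregates the
+ // server's streamed response into a single FullHttpResponse for assertions.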
+ private FullHttpResponse sendRequestStream(
+ final InetSocketAddress remoteAddress,
+ final HttpRequest request,
+ final Stream<ToXContent> stream
+ ) {
+ final NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(1);
+ try {
+ final HttpClient client = createClient(remoteAddress, eventLoopGroup);
+
+ return client.headers(h -> h.add(request.headers()))
+ .baseUrl(request.getUri())
+ .request(request.method())
+ .send(Flux.fromStream(stream).map(s -> {
+ try (XContentBuilder builder = XContentType.JSON.contentBuilder()) {
+ return Unpooled.wrappedBuffer(
+ s.toXContent(builder, ToXContent.EMPTY_PARAMS).toString().getBytes(StandardCharsets.UTF_8)
+ );
+ } catch (final IOException ex) {
+ throw new UncheckedIOException(ex);
+ }
+ }))
+ .response(
+ (r, c) -> c.aggregate()
+ .map(
+ b -> new DefaultFullHttpResponse(
+ r.version(),
+ r.status(),
+ b.retain(),
+ r.responseHeaders(),
+ EmptyHttpHeaders.INSTANCE
+ )
+ )
+ )
+ .blockLast();
+
+ } finally {
+ eventLoopGroup.shutdownGracefully().awaitUninterruptibly();
+ }
+ }
+
private HttpClient createClient(final InetSocketAddress remoteAddress, final NioEventLoopGroup eventLoopGroup) {
final HttpClient client = HttpClient.newConnection()
.resolver(DefaultAddressResolverGroup.INSTANCE)
diff --git a/plugins/transport-reactor-netty4/src/test/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpServerTransportStreamingTests.java b/plugins/transport-reactor-netty4/src/test/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpServerTransportStreamingTests.java
new file mode 100644
index 0000000000000..a7bf71e58e9b6
--- /dev/null
+++ b/plugins/transport-reactor-netty4/src/test/java/org/opensearch/http/reactor/netty4/ReactorNetty4HttpServerTransportStreamingTests.java
@@ -0,0 +1,211 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.http.reactor.netty4;
+
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.lease.Releasable;
+import org.opensearch.common.network.NetworkService;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.MockBigArrays;
+import org.opensearch.common.util.MockPageCacheRecycler;
+import org.opensearch.common.util.concurrent.ThreadContext;
+import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.common.xcontent.support.XContentHttpChunk;
+import org.opensearch.core.common.transport.TransportAddress;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.http.HttpServerTransport;
+import org.opensearch.rest.RestChannel;
+import org.opensearch.rest.RestHandler;
+import org.opensearch.rest.RestRequest;
+import org.opensearch.rest.RestRequest.Method;
+import org.opensearch.rest.StreamingRestChannel;
+import org.opensearch.telemetry.tracing.noop.NoopTracer;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.test.rest.FakeRestRequest;
+import org.opensearch.threadpool.TestThreadPool;
+import org.opensearch.threadpool.ThreadPool;
+import org.opensearch.transport.reactor.SharedGroupFactory;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.nio.charset.StandardCharsets;
+import java.time.Duration;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import io.netty.handler.codec.http.DefaultHttpRequest;
+import io.netty.handler.codec.http.FullHttpResponse;
+import io.netty.handler.codec.http.HttpMethod;
+import io.netty.handler.codec.http.HttpRequest;
+import io.netty.handler.codec.http.HttpResponseStatus;
+import io.netty.handler.codec.http.HttpVersion;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Tests for the {@link ReactorNetty4HttpServerTransport} class with streaming support.
+ */
+public class ReactorNetty4HttpServerTransportStreamingTests extends OpenSearchTestCase {
+ private static final Function<String, ToXContent> XCONTENT_CONVERTER = (str) -> new ToXContent() {
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+ return builder.startObject().field("doc", str).endObject();
+ }
+ };
+
+ private NetworkService networkService;
+ private ThreadPool threadPool;
+ private MockBigArrays bigArrays;
+ private ClusterSettings clusterSettings;
+
+ @Before
+ public void setup() throws Exception {
+ networkService = new NetworkService(Collections.emptyList());
+ threadPool = new TestThreadPool("test");
+ bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
+ clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+ }
+
+ @After
+ public void shutdown() throws Exception {
+ if (threadPool != null) {
+ threadPool.shutdownNow();
+ }
+ threadPool = null;
+ networkService = null;
+ bigArrays = null;
+ clusterSettings = null;
+ }
+
+ public void testRequestResponseStreaming() throws InterruptedException {
+ final String responseString = randomAlphaOfLength(4 * 1024);
+ final String url = "/stream/";
+
+ final ToXContent[] chunks = newChunks(responseString);
+ final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() {
+ @Override
+ public Optional<RestHandler> dispatchHandler(String uri, String rawPath, Method method, Map<String, String> params) {
+ return Optional.of(new RestHandler() {
+ @Override
+ public boolean supportsStreaming() {
+ return true;
+ }
+
+ @Override
+ public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception {
+ logger.error("--> Unexpected request [{}]", request.uri());
+ throw new AssertionError();
+ }
+ });
+ }
+
+ @Override
+ public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) {
+ if (url.equals(request.uri())) {
+ assertThat(channel, instanceOf(StreamingRestChannel.class));
+ final StreamingRestChannel streamingChannel = (StreamingRestChannel) channel;
+
+ // Await at most 5 seconds till channel is ready for writing the response stream, fail otherwise
+ final Mono<?> ready = Mono.fromRunnable(() -> {
+ while (!streamingChannel.isWritable()) {
+ Thread.onSpinWait();
+ }
+ }).timeout(Duration.ofSeconds(5));
+
+ threadPool.executor(ThreadPool.Names.WRITE)
+ .execute(() -> Flux.concat(Flux.fromArray(newChunks(responseString)).map(e -> {
+ try (XContentBuilder builder = channel.newBuilder(XContentType.JSON, true)) {
+ return XContentHttpChunk.from(e.toXContent(builder, ToXContent.EMPTY_PARAMS));
+ } catch (final IOException ex) {
+ throw new UncheckedIOException(ex);
+ }
+ }), Mono.just(XContentHttpChunk.last()))
+ .delaySubscription(ready)
+ .subscribe(streamingChannel::sendChunk, null, () -> {
+ if (channel.bytesOutput() instanceof Releasable) {
+ ((Releasable) channel.bytesOutput()).close();
+ }
+ }));
+ } else {
+ logger.error("--> Unexpected successful uri [{}]", request.uri());
+ throw new AssertionError();
+ }
+ }
+
+ @Override
+ public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
+ logger.error(
+ new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())),
+ cause
+ );
+ throw new AssertionError();
+ }
+
+ };
+
+ try (
+ ReactorNetty4HttpServerTransport transport = new ReactorNetty4HttpServerTransport(
+ Settings.EMPTY,
+ networkService,
+ bigArrays,
+ threadPool,
+ xContentRegistry(),
+ dispatcher,
+ clusterSettings,
+ new SharedGroupFactory(Settings.EMPTY),
+ NoopTracer.INSTANCE
+ )
+ ) {
+ transport.start();
+ final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
+
+ try (ReactorHttpClient client = ReactorHttpClient.create(false)) {
+ HttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, url);
+ final FullHttpResponse response = client.stream(remoteAddress.address(), request, Arrays.stream(chunks));
+ try {
+ assertThat(response.status(), equalTo(HttpResponseStatus.OK));
+ byte[] bytes = new byte[response.content().readableBytes()];
+ response.content().readBytes(bytes);
+ assertThat(new String(bytes, StandardCharsets.UTF_8), equalTo(Arrays.stream(newChunks(responseString)).map(s -> {
+ try (XContentBuilder builder = XContentType.JSON.contentBuilder()) {
+ return s.toXContent(builder, ToXContent.EMPTY_PARAMS).toString();
+ } catch (final IOException ex) {
+ throw new UncheckedIOException(ex);
+ }
+ }).collect(Collectors.joining(""))));
+ } finally {
+ response.release();
+ }
+ }
+ }
+ }
+
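+ // Splits the response string into 16-character segments, each wrapped in a small JSON object,
+ // so the streamed response can be compared against the concatenation of the chunks.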
+ private static ToXContent[] newChunks(final String responseString) {
+ final ToXContent[] chunks = new ToXContent[responseString.length() / 16];
+
+ for (int chunk = 0; chunk < responseString.length(); chunk += 16) {
+ chunks[chunk / 16] = XCONTENT_CONVERTER.apply(responseString.substring(chunk, chunk + 16));
+ }
+
+ return chunks;
+ }
+}
diff --git a/release-notes/opensearch.release-notes-2.15.0.md b/release-notes/opensearch.release-notes-2.15.0.md
new file mode 100644
index 0000000000000..e3b7cfc0558f3
--- /dev/null
+++ b/release-notes/opensearch.release-notes-2.15.0.md
@@ -0,0 +1,75 @@
+## 2024-06-12 Version 2.15.0 Release Notes
+
+## [2.15.0]
+### Added
+- Add leader and follower check failure counter metrics ([#12439](https://github.com/opensearch-project/OpenSearch/pull/12439))
+- Add latency metrics for instrumenting critical clusterManager code paths ([#12333](https://github.com/opensearch-project/OpenSearch/pull/12333))
+- Add support for Azure Managed Identity in repository-azure ([#12423](https://github.com/opensearch-project/OpenSearch/issues/12423))
+- Add useCompoundFile index setting ([#13478](https://github.com/opensearch-project/OpenSearch/pull/13478))
+- Make outbound side of transport protocol dependent ([#13293](https://github.com/opensearch-project/OpenSearch/pull/13293))
+- [Remote Store] Upload translog checkpoint as object metadata to translog.tlog ([#13637](https://github.com/opensearch-project/OpenSearch/pull/13637))
+- [Remote Store] Add dynamic cluster settings to set timeout for segments upload to Remote Store ([#13679](https://github.com/opensearch-project/OpenSearch/pull/13679))
+- Add getMetadataFields to MapperService ([#13819](https://github.com/opensearch-project/OpenSearch/pull/13819))
+- Add "wildcard" field type that supports efficient wildcard, prefix, and regexp queries ([#13461](https://github.com/opensearch-project/OpenSearch/pull/13461))
+- Allow setting query parameters on requests ([#13776](https://github.com/opensearch-project/OpenSearch/issues/13776))
+- Add dynamic action retry timeout setting ([#14022](https://github.com/opensearch-project/OpenSearch/issues/14022))
+- Add capability to disable source recovery_source for an index ([#13590](https://github.com/opensearch-project/OpenSearch/pull/13590))
+- Add remote routing table for remote state publication with experimental feature flag ([#13304](https://github.com/opensearch-project/OpenSearch/pull/13304))
+- Add upload flow for writing routing table to remote store ([#13870](https://github.com/opensearch-project/OpenSearch/pull/13870))
+- [Remote Store] Add support to disable flush based on translog reader count ([#14027](https://github.com/opensearch-project/OpenSearch/pull/14027))
+- Add recovery chunk size setting ([#13997](https://github.com/opensearch-project/OpenSearch/pull/13997))
+- [Query Insights] Add exporter support for top n queries ([#12982](https://github.com/opensearch-project/OpenSearch/pull/12982))
+- [Query Insights] Add X-Opaque-Id to search request metadata for top n queries ([#13374](https://github.com/opensearch-project/OpenSearch/pull/13374))
+- [Streaming Indexing] Enhance RestAction with request / response streaming support ([#13772](https://github.com/opensearch-project/OpenSearch/pull/13772))
+- Move Remote Store Migration from DocRep to GA and modify remote migration settings name ([#14100](https://github.com/opensearch-project/OpenSearch/pull/14100))
+- [Remote State] Add async remote state deletion task running on an interval, configurable by a setting ([#13995](https://github.com/opensearch-project/OpenSearch/pull/13995))
+- Add support for query level resource usage tracking ([#13172](https://github.com/opensearch-project/OpenSearch/pull/13172))
+- [Query Insights] Add cpu and memory metrics to top n queries ([#13739](https://github.com/opensearch-project/OpenSearch/pull/13739))
+- Derived field object type support ([#13720](https://github.com/opensearch-project/OpenSearch/pull/13720))
+- Support Dynamic Pruning in Cardinality Aggregation ([#13821](https://github.com/opensearch-project/OpenSearch/pull/13821))
+
+### Dependencies
+- Bump `com.github.spullara.mustache.java:compiler` from 0.9.10 to 0.9.13 ([#13329](https://github.com/opensearch-project/OpenSearch/pull/13329), [#13559](https://github.com/opensearch-project/OpenSearch/pull/13559))
+- Bump `org.apache.commons:commons-text` from 1.11.0 to 1.12.0 ([#13557](https://github.com/opensearch-project/OpenSearch/pull/13557))
+- Bump `org.hdrhistogram:HdrHistogram` from 2.1.12 to 2.2.2 ([#13556](https://github.com/opensearch-project/OpenSearch/pull/13556), [#13986](https://github.com/opensearch-project/OpenSearch/pull/13986))
+- Bump `com.gradle.enterprise` from 3.17.2 to 3.17.4 ([#13641](https://github.com/opensearch-project/OpenSearch/pull/13641), [#13753](https://github.com/opensearch-project/OpenSearch/pull/13753))
+- Bump `org.apache.hadoop:hadoop-minicluster` from 3.3.6 to 3.4.0 ([#13642](https://github.com/opensearch-project/OpenSearch/pull/13642))
+- Bump `mockito` from 5.11.0 to 5.12.0 ([#13665](https://github.com/opensearch-project/OpenSearch/pull/13665))
+- Bump `com.google.code.gson:gson` from 2.10.1 to 2.11.0 ([#13752](https://github.com/opensearch-project/OpenSearch/pull/13752))
+- Bump `ch.qos.logback:logback-core` from 1.5.3 to 1.5.6 ([#13756](https://github.com/opensearch-project/OpenSearch/pull/13756))
+- Bump `netty` from 4.1.109.Final to 4.1.110.Final ([#13802](https://github.com/opensearch-project/OpenSearch/pull/13802))
+- Bump `jackson` from 2.17.0 to 2.17.1 ([#13817](https://github.com/opensearch-project/OpenSearch/pull/13817))
+- Bump `reactor` from 3.5.15 to 3.5.17 ([#13825](https://github.com/opensearch-project/OpenSearch/pull/13825))
+- Bump `reactor-netty` from 1.1.17 to 1.1.19 ([#13825](https://github.com/opensearch-project/OpenSearch/pull/13825))
+- Bump `commons-cli:commons-cli` from 1.7.0 to 1.8.0 ([#13840](https://github.com/opensearch-project/OpenSearch/pull/13840))
+- Bump `org.apache.xmlbeans:xmlbeans` from 5.2.0 to 5.2.1 ([#13839](https://github.com/opensearch-project/OpenSearch/pull/13839))
+- Bump `actions/checkout` from 3 to 4 ([#13935](https://github.com/opensearch-project/OpenSearch/pull/13935))
+- Bump `com.netflix.nebula.ospackage-base` from 11.9.0 to 11.9.1 ([#13933](https://github.com/opensearch-project/OpenSearch/pull/13933))
+- Bump `com.azure:azure-core-http-netty` from 1.12.8 to 1.15.1 ([#14128](https://github.com/opensearch-project/OpenSearch/pull/14128))
+- Bump `tim-actions/get-pr-commits` from 1.1.0 to 1.3.1 ([#14126](https://github.com/opensearch-project/OpenSearch/pull/14126))
+
+### Changed
+- Add ability for Boolean and date field queries to run when only doc_values are enabled ([#11650](https://github.com/opensearch-project/OpenSearch/pull/11650))
+- Refactor implementations of query phase searcher, allow QueryCollectorContext to have zero collectors ([#13481](https://github.com/opensearch-project/OpenSearch/pull/13481))
+- Add support to inject telemetry instances to plugins ([#13636](https://github.com/opensearch-project/OpenSearch/pull/13636))
+- Add support to provide tags with values in Gauge metrics ([#13994](https://github.com/opensearch-project/OpenSearch/pull/13994))
+- Move cache removal notifications outside lru lock ([#14017](https://github.com/opensearch-project/OpenSearch/pull/14017))
+
+### Removed
+- Remove handling of index.mapper.dynamic in AutoCreateIndex ([#13067](https://github.com/opensearch-project/OpenSearch/pull/13067))
+
+### Fixed
+- Fix get field mapping API returns 404 error in mixed cluster with multiple versions ([#13624](https://github.com/opensearch-project/OpenSearch/pull/13624))
+- Allow clearing `remote_store.compatibility_mode` setting ([#13646](https://github.com/opensearch-project/OpenSearch/pull/13646))
+- Painless: ensure type "UnmodifiableMap" for params ([#13885](https://github.com/opensearch-project/OpenSearch/pull/13885))
+- Don't return negative scores from `multi_match` query with `cross_fields` type ([#13829](https://github.com/opensearch-project/OpenSearch/pull/13829))
+- Pass parent filter to inner hit query ([#13903](https://github.com/opensearch-project/OpenSearch/pull/13903))
+- Fix NPE on restore searchable snapshot ([#13911](https://github.com/opensearch-project/OpenSearch/pull/13911))
+- Fix double invocation of postCollection when MultiBucketCollector is present ([#14015](https://github.com/opensearch-project/OpenSearch/pull/14015))
+- Fix ReplicaShardBatchAllocator to batch shards without duplicates ([#13710](https://github.com/opensearch-project/OpenSearch/pull/13710))
+- Fix Java high-level REST client bulk() not respecting the bulkRequest.requireAlias(true) method call ([#14146](https://github.com/opensearch-project/OpenSearch/pull/14146))
+- Fix ShardNotFoundException during request cache clean up ([#14219](https://github.com/opensearch-project/OpenSearch/pull/14219))
+- Fix Concurrent Modification Exception in Indices Request Cache ([#14032](https://github.com/opensearch-project/OpenSearch/pull/14221))
+- Fix the rewrite method for MatchOnlyText field query ([#14248](https://github.com/opensearch-project/OpenSearch/pull/14248))
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml
index fa71137912a91..996c2aae8cfe4 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml
@@ -658,6 +658,7 @@ setup:
settings:
number_of_replicas: 0
number_of_shards: 1
+ refresh_interval: -1
mappings:
properties:
date:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml
index 3a0099dae3b33..78e2e6858c6ff 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml
@@ -1083,6 +1083,7 @@ setup:
settings:
number_of_replicas: 0
number_of_shards: 1
+ refresh_interval: -1
mappings:
properties:
date:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml
index 1356eac41ae79..fc82517788c91 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/330_auto_date_histogram.yml
@@ -106,8 +106,36 @@ setup:
version: " - 2.99.99"
reason: debug info for filter rewrite added in 3.0.0 (to be backported to 2.14.0)
+ - do:
+ indices.create:
+ index: test_profile
+ body:
+ settings:
+ number_of_shards: 1
+ number_of_replicas: 0
+ refresh_interval: -1
+ mappings:
+ properties:
+ date:
+ type: date
+
+ - do:
+ bulk:
+ index: test_profile
+ refresh: true
+ body:
+ - '{"index": {}}'
+ - '{"date": "2020-03-01", "v": 1}'
+ - '{"index": {}}'
+ - '{"date": "2020-03-02", "v": 2}'
+ - '{"index": {}}'
+ - '{"date": "2020-03-08", "v": 3}'
+ - '{"index": {}}'
+ - '{"date": "2020-03-09", "v": 4}'
+
- do:
search:
+ index: test_profile
body:
profile: true
size: 0
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/270_wildcard_fieldtype_queries.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/270_wildcard_fieldtype_queries.yml
new file mode 100644
index 0000000000000..05b6b2e5ed712
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/270_wildcard_fieldtype_queries.yml
@@ -0,0 +1,229 @@
+setup:
+ - skip:
+ version: " - 2.99.99"
+ reason: "Added in 2.15, but need to skip pre-3.0 before backport"
+
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ my_field:
+ type: wildcard
+ fields:
+ lower:
+ type: wildcard
+ normalizer: lowercase
+ doc_values:
+ type: wildcard
+ doc_values: true
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body:
+ my_field: "org.opensearch.transport.NodeDisconnectedException: [node_s0][127.0.0.1:39953][disconnected] disconnected"
+ - do:
+ index:
+ index: test
+ id: 2
+ body:
+ my_field: "[2024-06-08T06:31:37,443][INFO ][o.o.c.c.Coordinator ] [node_s2] cluster-manager node [{node_s0}{Nj7FjR7hRP2lh_zur8KN_g}{OTGOoWmmSsWP_RQ3tIKJ9g}{127.0.0.1}{127.0.0.1:39953}{imr}{shard_indexing_pressure_enabled=true}] failed, restarting discovery"
+
+ - do:
+ index:
+ index: test
+ id: 3
+ body:
+ my_field: "[2024-06-08T06:31:37,451][INFO ][o.o.c.s.ClusterApplierService] [node_s2] cluster-manager node changed {previous [{node_s0}{Nj7FjR7hRP2lh_zur8KN_g}{OTGOoWmmSsWP_RQ3tIKJ9g}{127.0.0.1}{127.0.0.1:39953}{imr}{shard_indexing_pressure_enabled=true}], current []}, term: 1, version: 24, reason: becoming candidate: onLeaderFailure"
+ - do:
+ index:
+ index: test
+ id: 4
+ body:
+ my_field: "[2024-06-08T06:31:37,452][WARN ][o.o.c.NodeConnectionsService] [node_s1] failed to connect to {node_s0}{Nj7FjR7hRP2lh_zur8KN_g}{OTGOoWmmSsWP_RQ3tIKJ9g}{127.0.0.1}{127.0.0.1:39953}{imr}{shard_indexing_pressure_enabled=true} (tried [1] times)"
+ - do:
+ index:
+ index: test
+ id: 5
+ body:
+ my_field: "AbCd"
+ - do:
+ index:
+ index: test
+ id: 6
+ body:
+ other_field: "test"
+ - do:
+ indices.refresh: {}
+
+---
+"term query matches exact value":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ term:
+ my_field: "AbCd"
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "5" }
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ term:
+ my_field.doc_values: "AbCd"
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "5" }
+
+---
+"term query matches lowercase-normalized value":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ term:
+ my_field.lower: "abcd"
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "5" }
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ term:
+ my_field.lower: "ABCD"
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "5" }
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ term:
+ my_field: "abcd"
+ - match: { hits.total.value: 0 }
+
+---
+"wildcard query matches":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ wildcard:
+ my_field:
+ value: "*Node*Exception*"
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "1" }
+
+---
+"wildcard query matches lowercase-normalized field":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ wildcard:
+ my_field.lower:
+ value: "*node*exception*"
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "1" }
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ wildcard:
+ my_field.lower:
+ value: "*NODE*EXCEPTION*"
+ - match: { hits.total.value: 1 }
+ - match: { hits.hits.0._id: "1" }
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ wildcard:
+ my_field:
+ value: "*node*exception*"
+ - match: { hits.total.value: 0 }
+
+---
+"prefix query matches":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ prefix:
+ my_field:
+ value: "[2024-06-08T"
+ - match: { hits.total.value: 3 }
+
+---
+"regexp query matches":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ regexp:
+ my_field:
+ value: ".*06-08.*cluster-manager node.*"
+ - match: { hits.total.value: 2 }
+
+---
+"regexp query matches lowercase-normalized field":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ regexp:
+ my_field.lower:
+ value: ".*06-08.*Cluster-Manager Node.*"
+ - match: { hits.total.value: 2 }
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ regexp:
+ my_field:
+ value: ".*06-08.*Cluster-Manager Node.*"
+ - match: { hits.total.value: 0 }
+
+---
+"wildcard match-all works":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ wildcard:
+ my_field:
+ value: "*"
+ - match: { hits.total.value: 5 }
+---
+"regexp match-all works":
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ regexp:
+ my_field:
+ value: ".*"
+ - match: { hits.total.value: 5 }
diff --git a/server/build.gradle b/server/build.gradle
index 624e5fe332662..b8a99facbf964 100644
--- a/server/build.gradle
+++ b/server/build.gradle
@@ -356,14 +356,18 @@ tasks.named("thirdPartyAudit").configure {
}
tasks.named("dependencyLicenses").configure {
+ mapping from: /jackson-.*/, to: 'jackson'
mapping from: /reactor-.*/, to: 'reactor'
mapping from: /lucene-.*/, to: 'lucene'
- dependencies = project.configurations.runtimeClasspath.fileCollection {
- it.group.startsWith('org.opensearch') == false ||
- // keep the following org.opensearch jars in
- (it.name == 'jna' ||
- it.name == 'securesm')
- }
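+ // Select runtime dependencies via an artifactView with a componentFilter so the decision is
+ // made on module coordinates rather than resolved file names.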
+ dependencies = project.configurations.runtimeClasspath.incoming.artifactView {
+ componentFilter {
+ it instanceof ModuleComponentIdentifier &&
+ (it.group.startsWith('org.opensearch') == false ||
+ // keep the following org.opensearch jars in
+ (it.name == 'jna' ||
+ it.name == 'securesm'))
+ }
+ }.files
}
tasks.named("filepermissions").configure {
diff --git a/server/licenses/jackson-LICENSE b/server/licenses/jackson-LICENSE
new file mode 100644
index 0000000000000..f5f45d26a49d6
--- /dev/null
+++ b/server/licenses/jackson-LICENSE
@@ -0,0 +1,8 @@
+This copy of Jackson JSON processor streaming parser/generator is licensed under the
+Apache (Software) License, version 2.0 ("the License").
+See the License for details about distribution rights, and the
+specific rights regarding derivate works.
+
+You may obtain a copy of the License at:
+
+http://www.apache.org/licenses/LICENSE-2.0
diff --git a/server/licenses/jackson-NOTICE b/server/licenses/jackson-NOTICE
new file mode 100644
index 0000000000000..4c976b7b4cc58
--- /dev/null
+++ b/server/licenses/jackson-NOTICE
@@ -0,0 +1,20 @@
+# Jackson JSON processor
+
+Jackson is a high-performance, Free/Open Source JSON processing library.
+It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has
+been in development since 2007.
+It is currently developed by a community of developers, as well as supported
+commercially by FasterXML.com.
+
+## Licensing
+
+Jackson core and extension components may licensed under different licenses.
+To find the details that apply to this artifact see the accompanying LICENSE file.
+For more information, including possible other licensing options, contact
+FasterXML.com (http://fasterxml.com).
+
+## Credits
+
+A list of contributors may be found from CREDITS file, which is included
+in some artifacts (usually source distributions); but is always available
+from the source code management (SCM) system project uses.
diff --git a/server/licenses/jackson-core-2.17.1.jar.sha1 b/server/licenses/jackson-core-2.17.1.jar.sha1
new file mode 100644
index 0000000000000..82dab5981e652
--- /dev/null
+++ b/server/licenses/jackson-core-2.17.1.jar.sha1
@@ -0,0 +1 @@
+5e52a11644cd59a28ef79f02bddc2cc3bab45edb
\ No newline at end of file
diff --git a/server/licenses/jackson-dataformat-cbor-2.17.1.jar.sha1 b/server/licenses/jackson-dataformat-cbor-2.17.1.jar.sha1
new file mode 100644
index 0000000000000..ff42ed1f92cfe
--- /dev/null
+++ b/server/licenses/jackson-dataformat-cbor-2.17.1.jar.sha1
@@ -0,0 +1 @@
+ba5d8e6ecc62aa0e49c0ce935b8696352dbebc71
\ No newline at end of file
diff --git a/server/licenses/jackson-dataformat-smile-2.17.1.jar.sha1 b/server/licenses/jackson-dataformat-smile-2.17.1.jar.sha1
new file mode 100644
index 0000000000000..47d19067cf2a6
--- /dev/null
+++ b/server/licenses/jackson-dataformat-smile-2.17.1.jar.sha1
@@ -0,0 +1 @@
+89683ac4f0a0c2c4f69ea56b90480ed40266dac8
\ No newline at end of file
diff --git a/server/licenses/jackson-dataformat-yaml-2.17.1.jar.sha1 b/server/licenses/jackson-dataformat-yaml-2.17.1.jar.sha1
new file mode 100644
index 0000000000000..7946e994c7104
--- /dev/null
+++ b/server/licenses/jackson-dataformat-yaml-2.17.1.jar.sha1
@@ -0,0 +1 @@
+b4c7b8a9ea3f398116a75c146b982b22afebc4ee
\ No newline at end of file
diff --git a/server/licenses/jopt-simple-5.0.4.jar.sha1 b/server/licenses/jopt-simple-5.0.4.jar.sha1
new file mode 100644
index 0000000000000..7ade81efe4d0d
--- /dev/null
+++ b/server/licenses/jopt-simple-5.0.4.jar.sha1
@@ -0,0 +1 @@
+4fdac2fbe92dfad86aa6e9301736f6b4342a3f5c
\ No newline at end of file
diff --git a/server/licenses/jopt-simple-LICENSE.txt b/server/licenses/jopt-simple-LICENSE.txt
new file mode 100644
index 0000000000000..85f923a95268a
--- /dev/null
+++ b/server/licenses/jopt-simple-LICENSE.txt
@@ -0,0 +1,24 @@
+/*
+ The MIT License
+
+ Copyright (c) 2004-2015 Paul R. Holser, Jr.
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
diff --git a/server/licenses/jopt-simple-NOTICE.txt b/server/licenses/jopt-simple-NOTICE.txt
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/server/licenses/lucene-analysis-common-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-analysis-common-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 08339fa8a4ce1..0000000000000
--- a/server/licenses/lucene-analysis-common-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9cc4e600289bf1171b47de74536bd34c476f85a8
\ No newline at end of file
diff --git a/server/licenses/lucene-analysis-common-9.11.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..7139f6a43a15a
--- /dev/null
+++ b/server/licenses/lucene-analysis-common-9.11.0.jar.sha1
@@ -0,0 +1 @@
+75a0a333cf1e043102743066c929e65fe51cbcda
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-backward-codecs-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 3dce8a2162edd..0000000000000
--- a/server/licenses/lucene-backward-codecs-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8babfe85be7e36c893741e08072c11e71db09715
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-9.11.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..735e80b60b001
--- /dev/null
+++ b/server/licenses/lucene-backward-codecs-9.11.0.jar.sha1
@@ -0,0 +1 @@
+db385446bc3fd70e7c6a744276c0a157bd60ee0a
\ No newline at end of file
diff --git a/server/licenses/lucene-core-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-core-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 943a9b2fd214b..0000000000000
--- a/server/licenses/lucene-core-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3c2361bd633374ae3814b175cc25ccf773f67026
\ No newline at end of file
diff --git a/server/licenses/lucene-core-9.11.0.jar.sha1 b/server/licenses/lucene-core-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..b0d38c4165581
--- /dev/null
+++ b/server/licenses/lucene-core-9.11.0.jar.sha1
@@ -0,0 +1 @@
+2e487755a6814b2a1bc770c26569dcba86873dcf
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-grouping-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 8587c3ed5e82a..0000000000000
--- a/server/licenses/lucene-grouping-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d9f29b49cd1e0a061ff7fa4a53e8605bd49bd3d0
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-9.11.0.jar.sha1 b/server/licenses/lucene-grouping-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..562de95605b60
--- /dev/null
+++ b/server/licenses/lucene-grouping-9.11.0.jar.sha1
@@ -0,0 +1 @@
+882bdaf209b0acb332aa34836616424bcbecf462
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-highlighter-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 25579432a9cbd..0000000000000
--- a/server/licenses/lucene-highlighter-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-33bc26d46d62bb1cf3bf725db637226a43db7625
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-9.11.0.jar.sha1 b/server/licenses/lucene-highlighter-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..e0ef36d321c9d
--- /dev/null
+++ b/server/licenses/lucene-highlighter-9.11.0.jar.sha1
@@ -0,0 +1 @@
+44accdc03c5482e602718f7bf91e5940ba4e4870
\ No newline at end of file
diff --git a/server/licenses/lucene-join-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-join-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 1bfef89965e67..0000000000000
--- a/server/licenses/lucene-join-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-82966698abdb8f0367a162f642560566a6085dc8
\ No newline at end of file
diff --git a/server/licenses/lucene-join-9.11.0.jar.sha1 b/server/licenses/lucene-join-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..34c618ccfbcc7
--- /dev/null
+++ b/server/licenses/lucene-join-9.11.0.jar.sha1
@@ -0,0 +1 @@
+32a30ee03ed4f3e43bf63250270b2d4d53050045
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-memory-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 73adf3fcb2829..0000000000000
--- a/server/licenses/lucene-memory-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-746f392e7ec27a7cd6ca2add7dd8441d2a6085da
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-9.11.0.jar.sha1 b/server/licenses/lucene-memory-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..d730cfb4b7660
--- /dev/null
+++ b/server/licenses/lucene-memory-9.11.0.jar.sha1
@@ -0,0 +1 @@
+b3e80aa6aa3299118e76a23edc23b58f3ba5a515
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-misc-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 7f7dfead4c329..0000000000000
--- a/server/licenses/lucene-misc-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0eb06ecc39c0ec0db380a6e5aad1b16907e0bfd9
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-9.11.0.jar.sha1 b/server/licenses/lucene-misc-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..9be27f004435b
--- /dev/null
+++ b/server/licenses/lucene-misc-9.11.0.jar.sha1
@@ -0,0 +1 @@
+54fe308908194e1b0697a1157a45c5998c9e1083
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-queries-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index e3d400003efd8..0000000000000
--- a/server/licenses/lucene-queries-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0e56eb18cceffcd5ce2e47b679e873420254df74
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-9.11.0.jar.sha1 b/server/licenses/lucene-queries-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..b445610c25858
--- /dev/null
+++ b/server/licenses/lucene-queries-9.11.0.jar.sha1
@@ -0,0 +1 @@
+987d1286949ddf514b8405fd453ed47bebdfb12d
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-queryparser-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 8e8c7f5171107..0000000000000
--- a/server/licenses/lucene-queryparser-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dee3997a72eeae905e92930f53e724b6bef279da
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-9.11.0.jar.sha1 b/server/licenses/lucene-queryparser-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..a1620ba9c7708
--- /dev/null
+++ b/server/licenses/lucene-queryparser-9.11.0.jar.sha1
@@ -0,0 +1 @@
+e97fe1c0d102edb8d6e1c01454992fd2b8d80ae0
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-sandbox-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 2d1df051e30b4..0000000000000
--- a/server/licenses/lucene-sandbox-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-946bc45b87b3d770ab6828b0d0a5f8684f2c3624
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-9.11.0.jar.sha1 b/server/licenses/lucene-sandbox-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..0dc193f054973
--- /dev/null
+++ b/server/licenses/lucene-sandbox-9.11.0.jar.sha1
@@ -0,0 +1 @@
+5e46b790744bd9118ccc053f70235364213312a5
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-spatial-extras-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 0f9b7c0e90218..0000000000000
--- a/server/licenses/lucene-spatial-extras-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d73667f61fb5e7fde4cec52fcfbbfd9847068aec
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-9.11.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..9d3a8d2857db6
--- /dev/null
+++ b/server/licenses/lucene-spatial-extras-9.11.0.jar.sha1
@@ -0,0 +1 @@
+079ca5aaf544a3acde84b8b88423ace6dedc23eb
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-spatial3d-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 87894603e0d84..0000000000000
--- a/server/licenses/lucene-spatial3d-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a8e8ab80bfb6abd70932e50fe31e13ecf2e00987
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-9.11.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..fd5ff875a0113
--- /dev/null
+++ b/server/licenses/lucene-spatial3d-9.11.0.jar.sha1
@@ -0,0 +1 @@
+564558818d70fc384db5b36fbc8a0ab27b107609
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-9.11.0-snapshot-4be6531.jar.sha1 b/server/licenses/lucene-suggest-9.11.0-snapshot-4be6531.jar.sha1
deleted file mode 100644
index 6100f6fe0d585..0000000000000
--- a/server/licenses/lucene-suggest-9.11.0-snapshot-4be6531.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-45d6f0facd45d4e49585f0dabfa62ed5a1883033
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-9.11.0.jar.sha1 b/server/licenses/lucene-suggest-9.11.0.jar.sha1
new file mode 100644
index 0000000000000..2fa96e97f307a
--- /dev/null
+++ b/server/licenses/lucene-suggest-9.11.0.jar.sha1
@@ -0,0 +1 @@
+aa345db9b6caaf881e7890ea5b8911357d592167
\ No newline at end of file
diff --git a/server/licenses/snakeyaml-2.1.jar.sha1 b/server/licenses/snakeyaml-2.1.jar.sha1
new file mode 100644
index 0000000000000..5586b210a9736
--- /dev/null
+++ b/server/licenses/snakeyaml-2.1.jar.sha1
@@ -0,0 +1 @@
+c79f47315517560b5bd6a62376ee385e48105437
\ No newline at end of file
diff --git a/server/licenses/snakeyaml-LICENSE.txt b/server/licenses/snakeyaml-LICENSE.txt
new file mode 100644
index 0000000000000..d9a10c0d8e868
--- /dev/null
+++ b/server/licenses/snakeyaml-LICENSE.txt
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/server/licenses/snakeyaml-NOTICE.txt b/server/licenses/snakeyaml-NOTICE.txt
new file mode 100644
index 0000000000000..b51464eee1f00
--- /dev/null
+++ b/server/licenses/snakeyaml-NOTICE.txt
@@ -0,0 +1,24 @@
+***The art of simplicity is a puzzle of complexity.***
+
+## Overview ##
+[YAML](http://yaml.org) is a data serialization format designed for human readability and interaction with scripting languages.
+
+SnakeYAML is a YAML processor for the Java Virtual Machine.
+
+## SnakeYAML features ##
+
+* a **complete** [YAML 1.1 processor](http://yaml.org/spec/1.1/current.html). In particular, SnakeYAML can parse all examples from the specification.
+* Unicode support including UTF-8/UTF-16 input/output.
+* high-level API for serializing and deserializing native Java objects.
+* support for all types from the [YAML types repository](http://yaml.org/type/index.html).
+* relatively sensible error messages.
+
+## Info ##
+ * [Changes](https://bitbucket.org/asomov/snakeyaml/wiki/Changes)
+ * [Documentation](https://bitbucket.org/asomov/snakeyaml/wiki/Documentation)
+
+## Contribute ##
+* Mercurial DVCS is used to dance with the [source code](https://bitbucket.org/asomov/snakeyaml/src).
+* If you find a bug in SnakeYAML, please [file a bug report](https://bitbucket.org/asomov/snakeyaml/issues?status=new&status=open).
+* You may discuss SnakeYAML at
+[the mailing list](http://groups.google.com/group/snakeyaml-core).
\ No newline at end of file
diff --git a/server/licenses/zstd-jni-1.5.5-5.jar.sha1 b/server/licenses/zstd-jni-1.5.5-5.jar.sha1
new file mode 100644
index 0000000000000..498c60c34e3da
--- /dev/null
+++ b/server/licenses/zstd-jni-1.5.5-5.jar.sha1
@@ -0,0 +1 @@
+74ffdc5f140080adacf5278287aadd950179f848
\ No newline at end of file
diff --git a/server/licenses/zstd-jni-LICENSE.txt b/server/licenses/zstd-jni-LICENSE.txt
new file mode 100644
index 0000000000000..c4dd507c1c72f
--- /dev/null
+++ b/server/licenses/zstd-jni-LICENSE.txt
@@ -0,0 +1,29 @@
+-----------------------------------------------------------------------------
+** Beginning of "BSD License" text. **
+
+Zstd-jni: JNI bindings to Zstd Library
+
+Copyright (c) 2015-present, Luben Karavelov/ All rights reserved.
+
+BSD License
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this
+ list of conditions and the following disclaimer in the documentation and/or
+ other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/server/licenses/zstd-jni-NOTICE.txt b/server/licenses/zstd-jni-NOTICE.txt
new file mode 100644
index 0000000000000..389c97cbc892d
--- /dev/null
+++ b/server/licenses/zstd-jni-NOTICE.txt
@@ -0,0 +1 @@
+The code for the JNI bindings to Zstd library was originally authored by Luben Karavelov
diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java
index bc0557ddc2afa..fc0a574c191b1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java
@@ -34,6 +34,7 @@
import org.apache.lucene.index.CorruptIndexException;
import org.opensearch.Version;
+import org.opensearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse;
import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction;
import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest;
import org.opensearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction;
@@ -55,7 +56,9 @@
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.routing.ShardRouting;
+import org.opensearch.cluster.routing.ShardRoutingState;
import org.opensearch.cluster.routing.UnassignedInfo;
+import org.opensearch.cluster.routing.allocation.AllocationDecision;
import org.opensearch.cluster.routing.allocation.ExistingShardsAllocator;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.settings.Settings;
@@ -98,6 +101,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static java.util.Collections.emptyMap;
@@ -105,8 +109,10 @@
import static org.opensearch.cluster.coordination.ClusterBootstrapService.INITIAL_CLUSTER_MANAGER_NODES_SETTING;
import static org.opensearch.cluster.health.ClusterHealthStatus.GREEN;
import static org.opensearch.cluster.health.ClusterHealthStatus.RED;
+import static org.opensearch.cluster.health.ClusterHealthStatus.YELLOW;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
+import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.opensearch.gateway.GatewayRecoveryTestUtils.corruptShard;
import static org.opensearch.gateway.GatewayRecoveryTestUtils.getDiscoveryNodes;
@@ -753,6 +759,6 @@ public void testMessyElectionsStillMakeClusterGoGreen() throws Exception {
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
- .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
+ .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
.build()
);
@@ -843,6 +850,93 @@ public void testBatchModeDisabled() throws Exception {
ensureGreen("test");
}
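+ // Verifies that, in batch mode, replicas with delayed allocation stay unassigned
+ // until their original nodes rejoin, even when the two stopped nodes restart at
+ // different times.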
+ public void testMultipleReplicaShardAssignmentWithDelayedAllocationAndDifferentNodeStartTimeInBatchMode() throws Exception {
+ internalCluster().startClusterManagerOnlyNodes(
+ 1,
+ Settings.builder().put(ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_BATCH_MODE.getKey(), true).build()
+ );
+ internalCluster().startDataOnlyNodes(6);
+ createIndex(
+ "test",
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 3)
+ .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "60m")
+ .build()
+ );
+ ensureGreen("test");
+
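+ // pick two data nodes that hold replica shards and remember their data paths so they can be restarted later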
+ List<String> nodesWithReplicaShards = findNodesWithShard(false);
+ Settings replicaNode0DataPathSettings = internalCluster().dataPathSettings(nodesWithReplicaShards.get(0));
+ Settings replicaNode1DataPathSettings = internalCluster().dataPathSettings(nodesWithReplicaShards.get(1));
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodesWithReplicaShards.get(0)));
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodesWithReplicaShards.get(1)));
+
+ ensureStableCluster(5);
+
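+ // a manual reroute should not override the allocation delay of the stopped replicas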
+ logger.info("--> explicitly triggering reroute");
+ ClusterRerouteResponse clusterRerouteResponse = client().admin().cluster().prepareReroute().setRetryFailed(true).get();
+ assertTrue(clusterRerouteResponse.isAcknowledged());
+
+ ClusterHealthResponse health = client().admin().cluster().health(Requests.clusterHealthRequest().timeout("5m")).actionGet();
+ assertFalse(health.isTimedOut());
+ assertEquals(YELLOW, health.getStatus());
+ assertEquals(2, health.getUnassignedShards());
+ // both replicas should remain unassigned because their allocation is delayed (ALLOCATION_DELAYED)
+ ClusterAllocationExplainResponse allocationExplainResponse = client().admin()
+ .cluster()
+ .prepareAllocationExplain()
+ .setIndex("test")
+ .setShard(0)
+ .setPrimary(false)
+ .get();
+ assertEquals(
+ AllocationDecision.ALLOCATION_DELAYED,
+ allocationExplainResponse.getExplanation().getShardAllocationDecision().getAllocateDecision().getAllocationDecision()
+ );
+
+ logger.info("--> restarting the node 1");
+ internalCluster().startDataOnlyNode(
+ Settings.builder().put("node.name", nodesWithReplicaShards.get(0)).put(replicaNode0DataPathSettings).build()
+ );
+ clusterRerouteResponse = client().admin().cluster().prepareReroute().setRetryFailed(true).get();
+ assertTrue(clusterRerouteResponse.isAcknowledged());
+ ensureStableCluster(6);
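+ // let the rejoined node's replica finish initializing before re-asserting cluster health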
+ waitUntil(
+ () -> client().admin().cluster().health(Requests.clusterHealthRequest().timeout("5m")).actionGet().getInitializingShards() == 0
+ );
+
+ health = client().admin().cluster().health(Requests.clusterHealthRequest().timeout("5m")).actionGet();
+ assertFalse(health.isTimedOut());
+ assertEquals(YELLOW, health.getStatus());
+ assertEquals(1, health.getUnassignedShards());
+ assertEquals(1, health.getDelayedUnassignedShards());
+ allocationExplainResponse = client().admin()
+ .cluster()
+ .prepareAllocationExplain()
+ .setIndex("test")
+ .setShard(0)
+ .setPrimary(false)
+ .get();
+ assertEquals(
+ AllocationDecision.ALLOCATION_DELAYED,
+ allocationExplainResponse.getExplanation().getShardAllocationDecision().getAllocateDecision().getAllocationDecision()
+ );
+
+ logger.info("--> restarting the node 0");
+ internalCluster().startDataOnlyNode(
+ Settings.builder().put("node.name", nodesWithReplicaShards.get(1)).put(replicaNode1DataPathSettings).build()
+ );
+ ensureStableCluster(7);
+ ensureGreen("test");
+ }
+
public void testNBatchesCreationAndAssignment() throws Exception {
// we will reduce batch size to 5 to make sure we have enough batches to test assignment
// Total number of primary shards = 50 (50 indices*1)
@@ -1293,4 +1381,15 @@ private void prepareIndex(String indexName, int numberOfPrimaryShards) {
index(indexName, "type", "1", Collections.emptyMap());
flush(indexName);
}
+
+ private List<String> findNodesWithShard(final boolean primary) {
+ ClusterState state = client().admin().cluster().prepareState().get().getState();
+ List<ShardRouting> startedShards = state.routingTable().shardsWithState(ShardRoutingState.STARTED);
+ List<ShardRouting> requiredStartedShards = startedShards.stream()
+ .filter(startedShard -> startedShard.primary() == primary)
+ .collect(Collectors.toList());
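+ // shuffle so repeated calls do not always return the same nodes first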
+ Collections.shuffle(requiredStartedShards, random());
+ return requiredStartedShards.stream().map(shard -> state.nodes().get(shard.currentNodeId()).getName()).collect(Collectors.toList());
+ }
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/remote/RemoteClusterStateServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/remote/RemoteClusterStateServiceIT.java
index ab2f0f0080566..f6c7355ea06f6 100644
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/remote/RemoteClusterStateServiceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/gateway/remote/RemoteClusterStateServiceIT.java
@@ -26,13 +26,13 @@
import java.util.function.Function;
import java.util.stream.Collectors;
-import static org.opensearch.gateway.remote.RemoteClusterStateService.COORDINATION_METADATA;
-import static org.opensearch.gateway.remote.RemoteClusterStateService.CUSTOM_METADATA;
-import static org.opensearch.gateway.remote.RemoteClusterStateService.DELIMITER;
-import static org.opensearch.gateway.remote.RemoteClusterStateService.METADATA_FILE_PREFIX;
import static org.opensearch.gateway.remote.RemoteClusterStateService.REMOTE_CLUSTER_STATE_ENABLED_SETTING;
-import static org.opensearch.gateway.remote.RemoteClusterStateService.SETTING_METADATA;
-import static org.opensearch.gateway.remote.RemoteClusterStateService.TEMPLATES_METADATA;
+import static org.opensearch.gateway.remote.RemoteClusterStateUtils.DELIMITER;
+import static org.opensearch.gateway.remote.RemoteClusterStateUtils.METADATA_FILE_PREFIX;
+import static org.opensearch.gateway.remote.model.RemoteCoordinationMetadata.COORDINATION_METADATA;
+import static org.opensearch.gateway.remote.model.RemoteCustomMetadata.CUSTOM_METADATA;
+import static org.opensearch.gateway.remote.model.RemotePersistentSettingsMetadata.SETTING_METADATA;
+import static org.opensearch.gateway.remote.model.RemoteTemplatesMetadata.TEMPLATES_METADATA;
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
public class RemoteClusterStateServiceIT extends RemoteStoreBaseIntegTestCase {
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
index 766ca2c1189e5..299652e4f07a9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
@@ -1280,8 +1280,8 @@ public void testDeleteAndCreateSameIndexShardOnSameNode() throws Exception {
final Index index = state.metadata().index(indexName).getIndex();
assertBusy(() -> {
- assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(false));
- assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(true));
+ assertFalse(Arrays.stream(shardDirectory(node_1, index, 0)).anyMatch(Files::exists));
+ assertEquals(1, Arrays.stream(shardDirectory(node_2, index, 0)).filter(Files::exists).count());
});
logger.info("Moving the shard: {} again from node:{} to node:{}", indexName + "#0", node_2, node_1);
@@ -1294,11 +1294,10 @@ public void testDeleteAndCreateSameIndexShardOnSameNode() throws Exception {
.setWaitForNoInitializingShards(true)
.get();
assertThat(clusterHealth.isTimedOut(), equalTo(false));
- assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true));
assertBusy(() -> {
- assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true));
- assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(false));
+ assertEquals(1, Arrays.stream(shardDirectory(node_1, index, 0)).filter(Files::exists).count());
+ assertFalse(Arrays.stream(shardDirectory(node_2, index, 0)).anyMatch(Files::exists));
});
logger.info("Clearing the cache for index:{}. And verify the request stats doesn't go negative", indexName);
@@ -1311,11 +1310,12 @@ public void testDeleteAndCreateSameIndexShardOnSameNode() throws Exception {
assertTrue(stats.getMemorySizeInBytes() == 0);
}
- private Path shardDirectory(String server, Index index, int shard) {
+ private Path[] shardDirectory(String server, Index index, int shard) {
NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server);
final Path[] paths = env.availableShardPaths(new ShardId(index, shard));
- assert paths.length == 1;
- return paths[0];
+ // a shard may have more than one available path, depending on
+ // `InternalTestCluster.numDataPaths`.
+ return paths;
}
private void setupIndex(Client client, String index) throws Exception {
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
index 8ce87f37d77cd..cf93a432d0371 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
@@ -104,7 +104,6 @@
import org.opensearch.indices.recovery.RecoveryState.Stage;
import org.opensearch.indices.replication.common.ReplicationLuceneIndex;
import org.opensearch.node.NodeClosedException;
-import org.opensearch.node.RecoverySettingsChunkSizePlugin;
import org.opensearch.plugins.AnalysisPlugin;
import org.opensearch.plugins.Plugin;
import org.opensearch.plugins.PluginsService;
@@ -156,7 +155,7 @@
import static java.util.stream.Collectors.toList;
import static org.opensearch.action.DocWriteResponse.Result.CREATED;
import static org.opensearch.action.DocWriteResponse.Result.UPDATED;
-import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING;
+import static org.opensearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_CHUNK_SIZE_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.empty;
@@ -187,7 +186,6 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(
MockTransportService.TestPlugin.class,
MockFSIndexStore.TestPlugin.class,
- RecoverySettingsChunkSizePlugin.class,
TestAnalysisPlugin.class,
InternalSettingsPlugin.class,
MockEngineFactoryPlugin.class
@@ -263,7 +261,7 @@ private void slowDownRecovery(ByteSizeValue shardSize) {
// one chunk per sec..
.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), chunkSize, ByteSizeUnit.BYTES)
// small chunks
- .put(CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkSize, ByteSizeUnit.BYTES))
+ .put(INDICES_RECOVERY_CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkSize, ByteSizeUnit.BYTES))
)
.get()
.isAcknowledged()
@@ -278,7 +276,10 @@ private void restoreRecoverySpeed() {
.setTransientSettings(
Settings.builder()
.put(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), "20mb")
- .put(CHUNK_SIZE_SETTING.getKey(), RecoverySettings.DEFAULT_CHUNK_SIZE)
+ .put(
+ INDICES_RECOVERY_CHUNK_SIZE_SETTING.getKey(),
+ RecoverySettings.INDICES_RECOVERY_CHUNK_SIZE_SETTING.getDefault(Settings.EMPTY)
+ )
)
.get()
.isAcknowledged()
diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
index bf0533143cf91..692beb86279b9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
@@ -46,7 +46,6 @@
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.indices.recovery.FileChunkRequest;
import org.opensearch.indices.recovery.PeerRecoveryTargetService;
-import org.opensearch.node.RecoverySettingsChunkSizePlugin;
import org.opensearch.plugins.Plugin;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
@@ -61,7 +60,7 @@
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
-import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING;
+import static org.opensearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_CHUNK_SIZE_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -81,7 +80,7 @@ public static Collection<Object[]> parameters() {