diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index ba2d3507e3c33..9c60770c8487a 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -76,6 +76,7 @@ steps:
- trigger: elasticsearch-dra-workflow
label: Trigger DRA snapshot workflow
async: true
+ branches: "main 8.* 7.17"
build:
branch: "$BUILDKITE_BRANCH"
commit: "$BUILDKITE_COMMIT"
diff --git a/docs/changelog/113374.yaml b/docs/changelog/113374.yaml
new file mode 100644
index 0000000000000..f1d5750de0f60
--- /dev/null
+++ b/docs/changelog/113374.yaml
@@ -0,0 +1,5 @@
+pr: 113374
+summary: Add ESQL match function
+area: ES|QL
+type: feature
+issues: []
diff --git a/docs/changelog/114168.yaml b/docs/changelog/114168.yaml
new file mode 100644
index 0000000000000..58f1ab7110e7d
--- /dev/null
+++ b/docs/changelog/114168.yaml
@@ -0,0 +1,5 @@
+pr: 114168
+summary: Add a query rules tester API call
+area: Relevance
+type: enhancement
+issues: []
diff --git a/docs/changelog/114407.yaml b/docs/changelog/114407.yaml
new file mode 100644
index 0000000000000..4c1134a9d3834
--- /dev/null
+++ b/docs/changelog/114407.yaml
@@ -0,0 +1,6 @@
+pr: 114407
+summary: Fix synthetic source handling for `bit` type in `dense_vector` field
+area: Search
+type: bug
+issues:
+ - 114402
diff --git a/docs/changelog/114453.yaml b/docs/changelog/114453.yaml
new file mode 100644
index 0000000000000..0d5345ad9d2a6
--- /dev/null
+++ b/docs/changelog/114453.yaml
@@ -0,0 +1,5 @@
+pr: 114453
+summary: Switch default chunking strategy to sentence
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/docs/changelog/114457.yaml b/docs/changelog/114457.yaml
new file mode 100644
index 0000000000000..9558c41852f69
--- /dev/null
+++ b/docs/changelog/114457.yaml
@@ -0,0 +1,6 @@
+pr: 114457
+summary: "[Inference API] Introduce Update API to change some aspects of existing\
+ \ inference endpoints"
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/docs/changelog/114482.yaml b/docs/changelog/114482.yaml
new file mode 100644
index 0000000000000..a5e2e981f7adc
--- /dev/null
+++ b/docs/changelog/114482.yaml
@@ -0,0 +1,5 @@
+pr: 114482
+summary: Remove snapshot build restriction for match and qstr functions
+area: ES|QL
+type: feature
+issues: []
diff --git a/docs/changelog/114549.yaml b/docs/changelog/114549.yaml
new file mode 100644
index 0000000000000..a6bdbba93876b
--- /dev/null
+++ b/docs/changelog/114549.yaml
@@ -0,0 +1,5 @@
+pr: 114549
+summary: Send mid-stream errors to users
+area: Machine Learning
+type: bug
+issues: []
diff --git a/docs/changelog/114596.yaml b/docs/changelog/114596.yaml
new file mode 100644
index 0000000000000..a36978dcacd8c
--- /dev/null
+++ b/docs/changelog/114596.yaml
@@ -0,0 +1,5 @@
+pr: 114596
+summary: Stream Google Completion
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/docs/changelog/114683.yaml b/docs/changelog/114683.yaml
new file mode 100644
index 0000000000000..a677e65a12b0e
--- /dev/null
+++ b/docs/changelog/114683.yaml
@@ -0,0 +1,5 @@
+pr: 114683
+summary: Default inference endpoint for the multilingual-e5-small model
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/docs/changelog/114715.yaml b/docs/changelog/114715.yaml
new file mode 100644
index 0000000000000..0894cb2fa42ca
--- /dev/null
+++ b/docs/changelog/114715.yaml
@@ -0,0 +1,5 @@
+pr: 114715
+summary: Ignore unrecognized openai sse fields
+area: Machine Learning
+type: bug
+issues: []
diff --git a/docs/changelog/114732.yaml b/docs/changelog/114732.yaml
new file mode 100644
index 0000000000000..42176cdbda443
--- /dev/null
+++ b/docs/changelog/114732.yaml
@@ -0,0 +1,5 @@
+pr: 114732
+summary: Stream Bedrock Completion
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/docs/reference/esql/functions/description/match.asciidoc b/docs/reference/esql/functions/description/match.asciidoc
new file mode 100644
index 0000000000000..2a27fe4814395
--- /dev/null
+++ b/docs/reference/esql/functions/description/match.asciidoc
@@ -0,0 +1,5 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+*Description*
+
+Performs a match query on the specified field. Returns true if the provided query matches the row.
diff --git a/docs/reference/esql/functions/examples/match.asciidoc b/docs/reference/esql/functions/examples/match.asciidoc
new file mode 100644
index 0000000000000..3f31d68ea9abb
--- /dev/null
+++ b/docs/reference/esql/functions/examples/match.asciidoc
@@ -0,0 +1,13 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+*Example*
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/match-function.csv-spec[tag=match-with-field]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/match-function.csv-spec[tag=match-with-field-result]
+|===
+
diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json
new file mode 100644
index 0000000000000..8a355360a790f
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/definition/match.json
@@ -0,0 +1,85 @@
+{
+ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
+ "type" : "eval",
+ "name" : "match",
+ "description" : "Performs a match query on the specified field. Returns true if the provided query matches the row.",
+ "signatures" : [
+ {
+ "params" : [
+ {
+ "name" : "field",
+ "type" : "keyword",
+ "optional" : false,
+ "description" : "Field that the query will target."
+ },
+ {
+ "name" : "query",
+ "type" : "keyword",
+ "optional" : false,
+ "description" : "Text you wish to find in the provided field."
+ }
+ ],
+ "variadic" : false,
+ "returnType" : "boolean"
+ },
+ {
+ "params" : [
+ {
+ "name" : "field",
+ "type" : "keyword",
+ "optional" : false,
+ "description" : "Field that the query will target."
+ },
+ {
+ "name" : "query",
+ "type" : "text",
+ "optional" : false,
+ "description" : "Text you wish to find in the provided field."
+ }
+ ],
+ "variadic" : false,
+ "returnType" : "boolean"
+ },
+ {
+ "params" : [
+ {
+ "name" : "field",
+ "type" : "text",
+ "optional" : false,
+ "description" : "Field that the query will target."
+ },
+ {
+ "name" : "query",
+ "type" : "keyword",
+ "optional" : false,
+ "description" : "Text you wish to find in the provided field."
+ }
+ ],
+ "variadic" : false,
+ "returnType" : "boolean"
+ },
+ {
+ "params" : [
+ {
+ "name" : "field",
+ "type" : "text",
+ "optional" : false,
+ "description" : "Field that the query will target."
+ },
+ {
+ "name" : "query",
+ "type" : "text",
+ "optional" : false,
+ "description" : "Text you wish to find in the provided field."
+ }
+ ],
+ "variadic" : false,
+ "returnType" : "boolean"
+ }
+ ],
+ "examples" : [
+ "from books \n| where match(author, \"Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;"
+ ],
+ "preview" : true,
+ "snapshot_only" : false
+}
diff --git a/docs/reference/esql/functions/kibana/definition/qstr.json b/docs/reference/esql/functions/kibana/definition/qstr.json
index 72be906cbae63..9823c3cff8923 100644
--- a/docs/reference/esql/functions/kibana/definition/qstr.json
+++ b/docs/reference/esql/functions/kibana/definition/qstr.json
@@ -33,5 +33,5 @@
"from books \n| where qstr(\"author: Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;"
],
"preview" : true,
- "snapshot_only" : true
+ "snapshot_only" : false
}
diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md
new file mode 100644
index 0000000000000..3c06662982bbf
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/docs/match.md
@@ -0,0 +1,14 @@
+<!--
+This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+-->
+
+### MATCH
+Performs a match query on the specified field. Returns true if the provided query matches the row.
+
+```
+from books
+| where match(author, "Faulkner")
+| keep book_no, author
+| sort book_no
+| limit 5;
+```
diff --git a/docs/reference/esql/functions/layout/match.asciidoc b/docs/reference/esql/functions/layout/match.asciidoc
new file mode 100644
index 0000000000000..e62c81548c2b1
--- /dev/null
+++ b/docs/reference/esql/functions/layout/match.asciidoc
@@ -0,0 +1,17 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+[discrete]
+[[esql-match]]
+=== `MATCH`
+
+preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."]
+
+*Syntax*
+
+[.text-center]
+image::esql/functions/signature/match.svg[Embedded,opts=inline]
+
+include::../parameters/match.asciidoc[]
+include::../description/match.asciidoc[]
+include::../types/match.asciidoc[]
+include::../examples/match.asciidoc[]
diff --git a/docs/reference/esql/functions/parameters/match.asciidoc b/docs/reference/esql/functions/parameters/match.asciidoc
new file mode 100644
index 0000000000000..f18adb28cd20c
--- /dev/null
+++ b/docs/reference/esql/functions/parameters/match.asciidoc
@@ -0,0 +1,9 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+*Parameters*
+
+`field`::
+Field that the query will target.
+
+`query`::
+Text you wish to find in the provided field.
diff --git a/docs/reference/esql/functions/signature/match.svg b/docs/reference/esql/functions/signature/match.svg
new file mode 100644
index 0000000000000..e7bb001247a9d
--- /dev/null
+++ b/docs/reference/esql/functions/signature/match.svg
@@ -0,0 +1 @@
+MATCH ( field , query )
diff --git a/docs/reference/esql/functions/types/match.asciidoc b/docs/reference/esql/functions/types/match.asciidoc
new file mode 100644
index 0000000000000..7523b29c62b1d
--- /dev/null
+++ b/docs/reference/esql/functions/types/match.asciidoc
@@ -0,0 +1,12 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+*Supported types*
+
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+field | query | result
+keyword | keyword | boolean
+keyword | text | boolean
+text | keyword | boolean
+text | text | boolean
+|===
diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc
index 8fdc9f2e4f9cb..1e4c5a21d386c 100644
--- a/docs/reference/modules/network.asciidoc
+++ b/docs/reference/modules/network.asciidoc
@@ -153,23 +153,34 @@ The only requirements are that each node must be:
* Accessible at its transport publish address by all other nodes in its
cluster, and by any remote clusters that will discover it using
- <>.
+ <>.
Each node must have its own distinct publish address.
If you specify the transport publish address using a hostname then {es} will
resolve this hostname to an IP address once during startup, and other nodes
will use the resulting IP address instead of resolving the name again
-themselves. To avoid confusion, use a hostname which resolves to the node's
-address in all network locations.
+themselves. You must use a hostname such that all of the addresses to which it
+resolves are addresses at which the node is accessible from all other nodes. To
+avoid confusion, it is simplest to use a hostname which resolves to a single
+address.
+
+If you specify the transport publish address using a
+<> then {es} will resolve this value to
+a single IP address during startup, and other nodes will use the resulting IP
+address instead of resolving the value again themselves. You must use a value
+such that all of the addresses to which it resolves are addresses at which the
+node is accessible from all other nodes. To avoid confusion, it is simplest to
+use a value which resolves to a single address. It is usually a mistake to use
+`0.0.0.0` as a publish address on hosts with more than one network interface.
===== Using a single address
The most common configuration is for {es} to bind to a single address at which
-it is accessible to clients and other nodes. In this configuration you should
-just set `network.host` to that address. You should not separately set any bind
-or publish addresses, nor should you separately configure the addresses for the
-HTTP or transport interfaces.
+it is accessible to clients and other nodes. To use this configuration, set
+only `network.host` to the desired address. Do not separately set any bind or
+publish addresses. Do not separately specify the addresses for the HTTP or
+transport interfaces.
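
For illustration, a minimal sketch of the single-address configuration described above (the address is a placeholder, not taken from this change):

[source,yaml]
----
# elasticsearch.yml: bind and publish HTTP and transport traffic on one address.
# Setting only network.host configures all bind and publish addresses at once.
network.host: 192.0.2.1
----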
===== Using multiple addresses
diff --git a/docs/reference/query-rules/apis/index.asciidoc b/docs/reference/query-rules/apis/index.asciidoc
index 53d5fc3dc4eee..fbeb477acacb5 100644
--- a/docs/reference/query-rules/apis/index.asciidoc
+++ b/docs/reference/query-rules/apis/index.asciidoc
@@ -23,6 +23,7 @@ Use the following APIs to manage query rulesets:
* <>
* <>
* <>
+* preview:[] <<test-query-ruleset>>
include::put-query-ruleset.asciidoc[]
include::get-query-ruleset.asciidoc[]
@@ -31,4 +32,5 @@ include::delete-query-ruleset.asciidoc[]
include::put-query-rule.asciidoc[]
include::get-query-rule.asciidoc[]
include::delete-query-rule.asciidoc[]
+include::test-query-ruleset.asciidoc[]
diff --git a/docs/reference/query-rules/apis/test-query-ruleset.asciidoc b/docs/reference/query-rules/apis/test-query-ruleset.asciidoc
new file mode 100644
index 0000000000000..4a670645cea6e
--- /dev/null
+++ b/docs/reference/query-rules/apis/test-query-ruleset.asciidoc
@@ -0,0 +1,133 @@
+[role="xpack"]
+[[test-query-ruleset]]
+=== Test query ruleset
+
+++++
+Tests query ruleset
+++++
+
+Evaluates match criteria against a query ruleset to identify the rules that would match those criteria.
+
+preview::[]
+
+[[test-query-ruleset-request]]
+==== {api-request-title}
+
+`POST _query_rules/<ruleset_id>/_test`
+
+[[test-query-ruleset-prereq]]
+==== {api-prereq-title}
+
+Requires the `manage_search_query_rules` privilege.
+
+[[test-query-ruleset-path-params]]
+==== {api-path-parms-title}
+
+`<ruleset_id>`::
+(Required, string)
+
+[[test-query-rule-request-body]]
+==== {api-request-body-title}
+
+`match_criteria`::
+(Required, object) Defines the match criteria to apply to rules in the given query ruleset.
+Match criteria should match the keys defined in the `criteria.metadata` field of the rule.
+
+[[test-query-ruleset-response-codes]]
+==== {api-response-codes-title}
+
+`400`::
+The `ruleset_id` or `match_criteria` was not provided.
+
+`404` (Missing resources)::
+No query ruleset matching `ruleset_id` could be found.
+
+[[test-query-ruleset-example]]
+==== {api-examples-title}
+
+To test a ruleset, provide the match criteria that you want to test against:
+
+////
+
+[source,console]
+--------------------------------------------------
+PUT _query_rules/my-ruleset
+{
+ "rules": [
+ {
+ "rule_id": "my-rule1",
+ "type": "pinned",
+ "criteria": [
+ {
+ "type": "contains",
+ "metadata": "query_string",
+ "values": [ "pugs", "puggles" ]
+ }
+ ],
+ "actions": {
+ "ids": [
+ "id1",
+ "id2"
+ ]
+ }
+ },
+ {
+ "rule_id": "my-rule2",
+ "type": "pinned",
+ "criteria": [
+ {
+ "type": "fuzzy",
+ "metadata": "query_string",
+ "values": [ "rescue dogs" ]
+ }
+ ],
+ "actions": {
+ "docs": [
+ {
+ "_index": "index1",
+ "_id": "id3"
+ },
+ {
+ "_index": "index2",
+ "_id": "id4"
+ }
+ ]
+ }
+ }
+ ]
+}
+--------------------------------------------------
+// TESTSETUP
+
+[source,console]
+--------------------------------------------------
+DELETE _query_rules/my-ruleset
+--------------------------------------------------
+// TEARDOWN
+
+////
+
+[source,console]
+----
+POST _query_rules/my-ruleset/_test
+{
+ "match_criteria": {
+ "query_string": "puggles"
+ }
+}
+----
+
+A sample response:
+
+[source,console-result]
+----
+{
+ "total_matched_rules": 1,
+ "matched_rules": [
+ {
+ "ruleset_id": "my-ruleset",
+ "rule_id": "my-rule1"
+ }
+ ]
+}
+----
diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc
index 957f57ffc9105..5fd2304ff9378 100644
--- a/docs/reference/rest-api/usage.asciidoc
+++ b/docs/reference/rest-api/usage.asciidoc
@@ -210,7 +210,12 @@ GET /_xpack/usage
"service": "elasticsearch",
"task_type": "SPARSE_EMBEDDING",
"count": 1
- }
+ },
+ {
+ "service": "elasticsearch",
+ "task_type": "TEXT_EMBEDDING",
+ "count": 1
+      }
]
},
"logstash" : {
diff --git a/docs/reference/security/ccs-clients-integrations/index.asciidoc b/docs/reference/security/ccs-clients-integrations/index.asciidoc
index 11e58bb2aaf57..414e71d71b46e 100644
--- a/docs/reference/security/ccs-clients-integrations/index.asciidoc
+++ b/docs/reference/security/ccs-clients-integrations/index.asciidoc
@@ -13,6 +13,7 @@ be secured as well, or at least communicate with the cluster in a secured way:
* <>
* {auditbeat-ref}/securing-auditbeat.html[Auditbeat]
* {filebeat-ref}/securing-filebeat.html[Filebeat]
+* {fleet-guide}/secure.html[{fleet} & {agent}]
* {heartbeat-ref}/securing-heartbeat.html[Heartbeat]
* {kibana-ref}/using-kibana-with-security.html[{kib}]
* {logstash-ref}/ls-security.html[Logstash]
diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java
index aa869b1af4f5e..6f0b473b5ba1f 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java
@@ -40,6 +40,7 @@
import java.util.ServiceLoader;
import java.util.Set;
import java.util.function.Function;
+import java.util.function.LongFunction;
/**
* A utility to build XContent (ie json).
@@ -107,13 +108,15 @@ public static XContentBuilder builder(XContentType xContentType, Set<String> inc
private static final Map<Class<?>, Writer> WRITERS;
private static final Map<Class<?>, HumanReadableTransformer> HUMAN_READABLE_TRANSFORMERS;
private static final Map<Class<?>, Function<Object, Object>> DATE_TRANSFORMERS;
+ private static final LongFunction<String> UNIX_EPOCH_MILLIS_FORMATTER;
+
static {
Map<Class<?>, Writer> writers = new HashMap<>();
writers.put(Boolean.class, (b, v) -> b.value((Boolean) v));
writers.put(boolean[].class, (b, v) -> b.values((boolean[]) v));
writers.put(Byte.class, (b, v) -> b.value((Byte) v));
writers.put(byte[].class, (b, v) -> b.value((byte[]) v));
- writers.put(Date.class, XContentBuilder::timeValue);
+ writers.put(Date.class, XContentBuilder::timestampValue);
writers.put(Double.class, (b, v) -> b.value((Double) v));
writers.put(double[].class, (b, v) -> b.values((double[]) v));
writers.put(Float.class, (b, v) -> b.value((Float) v));
@@ -129,8 +132,8 @@ public static XContentBuilder builder(XContentType xContentType, Set<String> inc
writers.put(Locale.class, (b, v) -> b.value(v.toString()));
writers.put(Class.class, (b, v) -> b.value(v.toString()));
writers.put(ZonedDateTime.class, (b, v) -> b.value(v.toString()));
- writers.put(Calendar.class, XContentBuilder::timeValue);
- writers.put(GregorianCalendar.class, XContentBuilder::timeValue);
+ writers.put(Calendar.class, XContentBuilder::timestampValue);
+ writers.put(GregorianCalendar.class, XContentBuilder::timestampValue);
writers.put(BigInteger.class, (b, v) -> b.value((BigInteger) v));
writers.put(BigDecimal.class, (b, v) -> b.value((BigDecimal) v));
@@ -140,6 +143,8 @@ public static XContentBuilder builder(XContentType xContentType, Set<String> inc
// treat strings as already converted
dateTransformers.put(String.class, Function.identity());
+ LongFunction<String> unixEpochMillisFormatter = Long::toString;
+
// Load pluggable extensions
for (XContentBuilderExtension service : ServiceLoader.load(XContentBuilderExtension.class)) {
Map<Class<?>, Writer> addlWriters = service.getXContentWriters();
@@ -157,11 +162,14 @@ public static XContentBuilder builder(XContentType xContentType, Set<String> inc
writers.putAll(addlWriters);
humanReadableTransformer.putAll(addlTransformers);
dateTransformers.putAll(addlDateTransformers);
+
+ unixEpochMillisFormatter = service::formatUnixEpochMillis;
}
WRITERS = Map.copyOf(writers);
HUMAN_READABLE_TRANSFORMERS = Map.copyOf(humanReadableTransformer);
DATE_TRANSFORMERS = Map.copyOf(dateTransformers);
+ UNIX_EPOCH_MILLIS_FORMATTER = unixEpochMillisFormatter;
}
@FunctionalInterface
@@ -797,52 +805,53 @@ public XContentBuilder utf8Value(byte[] bytes, int offset, int length) throws IO
}
////////////////////////////////////////////////////////////////////////////
- // Date
+ // Timestamps
//////////////////////////////////
/**
- * Write a time-based field and value, if the passed timeValue is null a
- * null value is written, otherwise a date transformers lookup is performed.
-
- * @throws IllegalArgumentException if there is no transformers for the type of object
+ * Write a field with a timestamp value: if the passed timestamp is null then writes null, otherwise looks up the date transformer
+ * for the type of {@code timestamp} and uses it to format the value.
+ *
+ * @throws IllegalArgumentException if there is no transformer for the given value type
*/
- public XContentBuilder timeField(String name, Object timeValue) throws IOException {
- return field(name).timeValue(timeValue);
+ public XContentBuilder timestampField(String name, Object timestamp) throws IOException {
+ return field(name).timestampValue(timestamp);
}
/**
- * If the {@code humanReadable} flag is set, writes both a formatted and
- * unformatted version of the time value using the date transformer for the
- * {@link Long} class.
+ * Writes a field containing the raw number of milliseconds since the unix epoch, and also if the {@code humanReadable} flag is set,
+ * writes a formatted representation of this value using the UNIX_EPOCH_MILLIS_FORMATTER.
*/
- public XContentBuilder timeField(String name, String readableName, long value) throws IOException {
- assert name.equals(readableName) == false : "expected raw and readable field names to differ, but they were both: " + name;
+ public XContentBuilder timestampFieldsFromUnixEpochMillis(String rawFieldName, String humanReadableFieldName, long unixEpochMillis)
+ throws IOException {
+ assert rawFieldName.equals(humanReadableFieldName) == false
+ : "expected raw and readable field names to differ, but they were both: " + rawFieldName;
if (humanReadable) {
- Function<Object, Object> longTransformer = DATE_TRANSFORMERS.get(Long.class);
- if (longTransformer == null) {
- throw new IllegalArgumentException("cannot write time value xcontent for unknown value of type Long");
- }
- field(readableName).value(longTransformer.apply(value));
+ field(humanReadableFieldName, UNIX_EPOCH_MILLIS_FORMATTER.apply(unixEpochMillis));
}
- field(name, value);
+ field(rawFieldName, unixEpochMillis);
return this;
}
/**
- * Write a time-based value, if the value is null a null value is written,
- * otherwise a date transformers lookup is performed.
-
- * @throws IllegalArgumentException if there is no transformers for the type of object
+ * Write a timestamp value: if the passed timestamp is null then writes null, otherwise looks up the date transformer for the type of
+ * {@code timestamp} and uses it to format the value.
+ *
+ * @throws IllegalArgumentException if there is no transformer for the given value type
*/
- public XContentBuilder timeValue(Object timeValue) throws IOException {
- if (timeValue == null) {
+ public XContentBuilder timestampValue(Object timestamp) throws IOException {
+ if (timestamp == null) {
return nullValue();
} else {
- Function<Object, Object> transformer = DATE_TRANSFORMERS.get(timeValue.getClass());
+ Function<Object, Object> transformer = DATE_TRANSFORMERS.get(timestamp.getClass());
if (transformer == null) {
- throw new IllegalArgumentException("cannot write time value xcontent for unknown value of type " + timeValue.getClass());
+ final var exception = new IllegalArgumentException(
+ "cannot write timestamp value xcontent for value of unknown type " + timestamp.getClass()
+ );
+ assert false : exception;
+ throw exception;
}
- return value(transformer.apply(timeValue));
+ return value(transformer.apply(timestamp));
}
}
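
For reference, a hedged sketch (a hypothetical caller, not part of this change) of the renamed timestamp methods; without a registered `XContentBuilderExtension` the human-readable form falls back to the `Long::toString` default set up above:

```java
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

import java.io.IOException;
import java.util.Date;

class TimestampXContentSketch {
    static XContentBuilder build() throws IOException {
        // humanReadable(true) makes timestampFieldsFromUnixEpochMillis emit the
        // formatted field in addition to the raw epoch-millis field.
        XContentBuilder builder = XContentFactory.jsonBuilder().humanReadable(true);
        builder.startObject();
        // Emits e.g. {"created": "1234", "created_millis": 1234} under the default
        // Long::toString formatter; an extension can substitute a date string.
        builder.timestampFieldsFromUnixEpochMillis("created_millis", "created", 1234L);
        // Routes through the Date transformer; writes null for a null timestamp.
        builder.timestampField("modified", new Date(1234L));
        return builder.endObject();
    }
}
```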
diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java
index 1e48667079cfc..4e3b442e7d473 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java
@@ -68,4 +68,9 @@ public interface XContentBuilderExtension {
*
*/
Map<Class<?>, Function<Object, Object>> getDateTransformers();
+
+ /**
+ * Used to format a {@code long} representing the number of milliseconds since the Unix Epoch.
+ */
+ String formatUnixEpochMillis(long unixEpochMillis);
}
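
A hypothetical implementation of the new hook (illustrative only; Elasticsearch's real formatter comes from whatever extension is registered via `ServiceLoader`) might look like:

```java
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentBuilderExtension;

import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Map;
import java.util.function.Function;

// Registered through META-INF/services/org.elasticsearch.xcontent.XContentBuilderExtension.
public class IsoTimestampExtension implements XContentBuilderExtension {

    @Override
    public String formatUnixEpochMillis(long unixEpochMillis) {
        // Render epoch millis as an ISO-8601 instant, e.g. "1970-01-01T00:00:01.234Z".
        return DateTimeFormatter.ISO_INSTANT.format(Instant.ofEpochMilli(unixEpochMillis));
    }

    // The remaining hooks contribute nothing in this sketch.
    @Override
    public Map<Class<?>, XContentBuilder.Writer> getXContentWriters() {
        return Map.of();
    }

    @Override
    public Map<Class<?>, XContentBuilder.HumanReadableTransformer> getXContentHumanReadableTransformers() {
        return Map.of();
    }

    @Override
    public Map<Class<?>, Function<Object, Object>> getDateTransformers() {
        return Map.of();
    }
}
```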
diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java
index 3e66bf0edf394..e911bf1a41198 100644
--- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java
+++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java
@@ -65,17 +65,17 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
}
private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception {
- return prepareIndex(idx).setSource(jsonBuilder().startObject().timeField("date", date).field("value", value).endObject());
+ return prepareIndex(idx).setSource(jsonBuilder().startObject().timestampField("date", date).field("value", value).endObject());
}
private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception {
return prepareIndex("idx").setSource(
jsonBuilder().startObject()
.field("value", value)
- .timeField("date", date(month, day))
+ .timestampField("date", date(month, day))
.startArray("dates")
- .timeValue(date(month, day))
- .timeValue(date(month + 1, day + 1))
+ .timestampValue(date(month, day))
+ .timestampValue(date(month + 1, day + 1))
.endArray()
.endObject()
);
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java
index a3d0347c3d192..d6a0fd86265e5 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java
@@ -59,7 +59,7 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider {
public Settings getAdditionalIndexSettings(
String indexName,
@Nullable String dataStreamName,
- boolean isTimeSeries,
+ @Nullable IndexMode templateIndexMode,
Metadata metadata,
Instant resolvedAt,
Settings indexTemplateAndCreateRequestSettings,
@@ -70,15 +70,16 @@ public Settings getAdditionalIndexSettings(
// First backing index is created and then data stream is rolled over (in a single cluster state update).
// So at this point we can't check index_mode==time_series,
// so checking that index_mode==null|standard and templateIndexMode == TIME_SERIES
+ boolean isMigratingToTimeSeries = templateIndexMode == IndexMode.TIME_SERIES;
boolean migrating = dataStream != null
&& (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD)
- && isTimeSeries;
+ && isMigratingToTimeSeries;
IndexMode indexMode;
if (migrating) {
indexMode = IndexMode.TIME_SERIES;
} else if (dataStream != null) {
- indexMode = isTimeSeries ? dataStream.getIndexMode() : null;
- } else if (isTimeSeries) {
+ indexMode = isMigratingToTimeSeries ? dataStream.getIndexMode() : null;
+ } else if (isMigratingToTimeSeries) {
indexMode = IndexMode.TIME_SERIES;
} else {
indexMode = null;
diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java
index d8d4a9c03933a..015752724cb5d 100644
--- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java
+++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java
@@ -78,7 +78,7 @@ public void testGetAdditionalIndexSettings() throws Exception {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -123,7 +123,7 @@ public void testGetAdditionalIndexSettingsIndexRoutingPathAlreadyDefined() throw
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -193,7 +193,7 @@ public void testGetAdditionalIndexSettingsMappingsMerging() throws Exception {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -218,7 +218,7 @@ public void testGetAdditionalIndexSettingsNoMappings() {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -243,7 +243,7 @@ public void testGetAdditionalIndexSettingsLookAheadTime() throws Exception {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -268,7 +268,7 @@ public void testGetAdditionalIndexSettingsLookBackTime() throws Exception {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -299,7 +299,7 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreated() throws Exce
var result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -336,7 +336,7 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreatedTimeSettingsMi
() -> provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -362,7 +362,7 @@ public void testGetAdditionalIndexSettingsNonTsdbTemplate() {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- false,
+ null,
metadata,
Instant.ofEpochMilli(1L),
settings,
@@ -382,7 +382,7 @@ public void testGetAdditionalIndexSettingsMigrateToTsdb() {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 2),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
@@ -415,7 +415,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromTsdb() {
Settings result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 2),
dataStreamName,
- false,
+ null,
metadata,
Instant.ofEpochMilli(1L),
settings,
@@ -694,7 +694,7 @@ private Settings generateTsdbSettings(String mapping, Instant now) throws IOExce
var result = provider.getAdditionalIndexSettings(
DataStream.getDefaultBackingIndexName(dataStreamName, 1),
dataStreamName,
- true,
+ IndexMode.TIME_SERIES,
metadata,
now,
settings,
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java
index 82888fa39c857..fcfd8e51aabb5 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadata.java
@@ -66,7 +66,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
// (we'll be a in a json map where the id is the key)
builder.startObject();
builder.field(VERSION.getPreferredName(), version);
- builder.timeField(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), modifiedDate);
+ builder.timestampFieldsFromUnixEpochMillis(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), modifiedDate);
builder.field(DATABASE.getPreferredName(), database);
builder.endObject();
return builder;
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java
index 0d1f1d2f9f660..7501c0094d647 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java
@@ -110,7 +110,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.startObject();
builder.field("id", database.id()); // serialize including the id -- this is get response serialization
builder.field(VERSION.getPreferredName(), item.version());
- builder.timeField(MODIFIED_DATE_MILLIS.getPreferredName(), MODIFIED_DATE.getPreferredName(), item.modifiedDate());
+ builder.timestampFieldsFromUnixEpochMillis(
+ MODIFIED_DATE_MILLIS.getPreferredName(),
+ MODIFIED_DATE.getPreferredName(),
+ item.modifiedDate()
+ );
builder.field(DATABASE.getPreferredName(), database);
builder.endObject();
}
diff --git a/muted-tests.yml b/muted-tests.yml
index c624dc5000e7b..fb02f24d66a8f 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -344,6 +344,15 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/114611
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
issue: https://github.com/elastic/elasticsearch/issues/114723
+- class: org.elasticsearch.kibana.KibanaThreadPoolIT
+ method: testBlockedThreadPoolsRejectUserRequests
+ issue: https://github.com/elastic/elasticsearch/issues/113939
+- class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT
+ method: testGetModel
+ issue: https://github.com/elastic/elasticsearch/issues/114657
+- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
+ method: testPutE5WithTrainedModelAndInference
+ issue: https://github.com/elastic/elasticsearch/issues/114023
# Examples:
#
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json
new file mode 100644
index 0000000000000..c82b45771ac7f
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.test.json
@@ -0,0 +1,38 @@
+{
+ "query_rules.test": {
+ "documentation": {
+ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/test-query-ruleset.html",
+ "description": "Tests a query ruleset to identify the rules that would match input criteria"
+ },
+ "stability": "experimental",
+ "visibility": "public",
+ "headers": {
+ "accept": [
+ "application/json"
+ ],
+ "content_type": [
+ "application/json"
+ ]
+ },
+ "url": {
+ "paths": [
+ {
+ "path": "/_query_rules/{ruleset_id}/_test",
+ "methods": [
+ "POST"
+ ],
+ "parts": {
+ "ruleset_id": {
+ "type": "string",
+ "description": "The unique identifier of the ruleset to test."
+ }
+ }
+ }
+ ]
+ },
+ "body": {
+ "description": "The match criteria to test against the ruleset",
+ "required": true
+ }
+ }
+}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml
index 1d703e451d5b9..48926eb866a5e 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml
@@ -411,6 +411,55 @@ index param - nested array within array:
- match: { hits.hits.0._source.path.to.some.3.id: [ 1000, 2000 ] }
+---
+index param - nested array within array - disabled second pass:
+ - requires:
+ cluster_features: ["mapper.synthetic_source_keep"]
+ reason: requires tracking ignored source
+
+ - do:
+ indices.create:
+ index: test
+ body:
+ settings:
+ index:
+ synthetic_source:
+ enable_second_doc_parsing_pass: false
+ mappings:
+ _source:
+ mode: synthetic
+ properties:
+ name:
+ type: keyword
+ path:
+ properties:
+ to:
+ properties:
+ some:
+ synthetic_source_keep: arrays
+ properties:
+ id:
+ type: integer
+
+ - do:
+ bulk:
+ index: test
+ refresh: true
+ body:
+ - '{ "create": { } }'
+ - '{ "name": "A", "path": [ { "to": [ { "some" : [ { "id": 10 }, { "id": [1, 3, 2] } ] }, { "some": { "id": 100 } } ] }, { "to": { "some": { "id": [1000, 2000] } } } ] }'
+ - match: { errors: false }
+
+ - do:
+ search:
+ index: test
+ sort: name
+ - match: { hits.hits.0._source.name: A }
+ - length: { hits.hits.0._source.path.to.some: 2}
+ - match: { hits.hits.0._source.path.to.some.0.id: 10 }
+ - match: { hits.hits.0._source.path.to.some.1.id: [ 1, 3, 2] }
+
+
---
# 112156
stored field under object with store_array_source:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml
index ed469ffd7ff16..02576ad1b2b01 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_bit.yml
@@ -354,3 +354,54 @@ setup:
dims: 40
index: true
similarity: max_inner_product
+
+
+---
+"Search with synthetic source":
+ - requires:
+ capabilities:
+ - method: POST
+ path: /_search
+ capabilities: [ bit_dense_vector_synthetic_source ]
+ test_runner_features: capabilities
+ reason: "Support for bit dense vector synthetic source capability required"
+ - do:
+ indices.create:
+ index: test_synthetic_source
+ body:
+ mappings:
+ properties:
+ name:
+ type: keyword
+ vector1:
+ type: dense_vector
+ element_type: bit
+ dims: 40
+ index: false
+ vector2:
+ type: dense_vector
+ element_type: bit
+ dims: 40
+ index: true
+ similarity: l2_norm
+
+ - do:
+ index:
+ index: test_synthetic_source
+ id: "1"
+ body:
+ name: cow.jpg
+ vector1: [2, -1, 1, 4, -3]
+ vector2: [2, -1, 1, 4, -3]
+
+ - do:
+ indices.refresh: {}
+
+ - do:
+ search:
+ force_synthetic_source: true
+ index: test_synthetic_source
+
+ - match: {hits.hits.0._id: "1"}
+ - match: {hits.hits.0._source.vector1: [2, -1, 1, 4, -3]}
+ - match: {hits.hits.0._source.vector2: [2, -1, 1, 4, -3]}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml
index 66b05e4d0d156..16574ceb587b4 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/70_dense_vector_telemetry.yml
@@ -21,13 +21,13 @@ setup:
element_type: byte
index_options:
type: hnsw
+ m: 16
+ ef_construction: 100
vector2:
type: dense_vector
dims: 1024
index: true
similarity: dot_product
- index_options:
- type: int8_hnsw
vector3:
type: dense_vector
dims: 100
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
index 1787b4f784574..a8e2ca818d3f4 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
@@ -88,11 +88,11 @@ private static String format(ZonedDateTime date, String pattern) {
private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception {
return prepareIndex(idx).setSource(
jsonBuilder().startObject()
- .timeField("date", date)
+ .timestampField("date", date)
.field("value", value)
.startArray("dates")
- .timeValue(date)
- .timeValue(date.plusMonths(1).plusDays(1))
+ .timestampValue(date)
+ .timestampValue(date.plusMonths(1).plusDays(1))
.endArray()
.endObject()
);
@@ -103,10 +103,10 @@ private IndexRequestBuilder indexDoc(int month, int day, int value) throws Excep
jsonBuilder().startObject()
.field("value", value)
.field("constant", 1)
- .timeField("date", date(month, day))
+ .timestampField("date", date(month, day))
.startArray("dates")
- .timeValue(date(month, day))
- .timeValue(date(month + 1, day + 1))
+ .timestampValue(date(month, day))
+ .timestampValue(date(month + 1, day + 1))
.endArray()
.endObject()
);
@@ -162,53 +162,53 @@ private void getMultiSortDocs(List<IndexRequestBuilder> builders) throws IOExcep
for (int i = 1; i <= 3; i++) {
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 1)).field("l", 1).field("d", i).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 1)).field("l", 1).field("d", i).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 2)).field("l", 2).field("d", i).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 2)).field("l", 2).field("d", i).endObject()
)
);
}
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 3)).field("l", 3).field("d", 1).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 3)).field("l", 3).field("d", 1).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 4)).field("l", 3).field("d", 1).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 4)).field("l", 3).field("d", 1).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 5)).field("l", 5).field("d", 1).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 5)).field("l", 5).field("d", 1).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 6)).field("l", 5).field("d", 1).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 6)).field("l", 5).field("d", 1).endObject()
)
);
builders.add(
prepareIndex("sort_idx").setSource(
- jsonBuilder().startObject().timeField("date", date(1, 7)).field("l", 5).field("d", 1).endObject()
+ jsonBuilder().startObject().timestampField("date", date(1, 7)).field("l", 5).field("d", 1).endObject()
)
);
}
@@ -997,7 +997,7 @@ public void testSingleValueWithTimeZone() throws Exception {
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
ZonedDateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
- reqs[i] = prepareIndex("idx2").setId("" + i).setSource(jsonBuilder().startObject().timeField("date", date).endObject());
+ reqs[i] = prepareIndex("idx2").setId("" + i).setSource(jsonBuilder().startObject().timestampField("date", date).endObject());
date = date.plusHours(1);
}
indexRandom(true, reqs);
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
index 0afc479474814..778be4ee0705f 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
@@ -63,7 +63,7 @@ private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, i
IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
reqs[i - idxIdStart] = prepareIndex("idx2").setId("" + i)
- .setSource(jsonBuilder().startObject().timeField("date", date).endObject());
+ .setSource(jsonBuilder().startObject().timestampField("date", date).endObject());
date = date.plusHours(stepSizeHours);
}
indexRandom(true, reqs);
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
index 6e9a9305eaf4e..afa3ad9d7e737 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
@@ -58,10 +58,10 @@ private static IndexRequestBuilder indexDoc(int month, int day, int value) throw
return prepareIndex("idx").setSource(
jsonBuilder().startObject()
.field("value", value)
- .timeField("date", date(month, day))
+ .timestampField("date", date(month, day))
.startArray("dates")
- .timeValue(date(month, day))
- .timeValue(date(month + 1, day + 1))
+ .timestampValue(date(month, day))
+ .timestampValue(date(month + 1, day + 1))
.endArray()
.endObject()
);
@@ -620,8 +620,8 @@ public void testScriptCaching() throws Exception {
);
indexRandom(
true,
- prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).endObject()),
- prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().timeField("date", date(2, 1)).endObject())
+ prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().timestampField("date", date(1, 1)).endObject()),
+ prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().timestampField("date", date(2, 1)).endObject())
);
// Make sure we are starting with a clear cache
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index fcd87ae83d00a..e4cceb6977adb 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -242,6 +242,7 @@ static TransportVersion def(int id) {
public static final TransportVersion ESQL_CACHED_STRING_SERIALIZATION = def(8_766_00_0);
public static final TransportVersion CHUNK_SENTENCE_OVERLAP_SETTING_ADDED = def(8_767_00_0);
public static final TransportVersion OPT_IN_ESQL_CCS_EXECUTION_INFO = def(8_768_00_0);
+ public static final TransportVersion QUERY_RULE_TEST_API = def(8_769_00_0);
/*
* STOP! READ THIS FIRST! No, really,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
index 9ffef1f178f44..b855f2cee7613 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
@@ -17,6 +17,7 @@
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
@@ -84,7 +85,7 @@ protected void masterOperation(
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request);
Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices());
Map<String, AliasFilter> indicesAndFilters = new HashMap<>();
- Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
+ Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
for (String index : concreteIndices) {
final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases);
final String[] aliases = indexNameExpressionResolver.indexAliases(
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java
index 6fe8432c31ccc..d942c4347960a 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java
@@ -79,7 +79,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field("index_name", info.indexName);
builder.field("index_uuid", info.indexUUID);
- builder.timeField("creation_date_millis", "creation_date", info.creationDateMillis);
+ builder.timestampFieldsFromUnixEpochMillis("creation_date_millis", "creation_date", info.creationDateMillis);
builder.array("node_ids", info.nodeIds.toArray(new String[0]));
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
index 5c5c71bc002b3..f5c100b7884bb 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
@@ -25,6 +25,7 @@
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
@@ -565,8 +566,8 @@ static void resolveIndices(
if (names.length == 1 && (Metadata.ALL.equals(names[0]) || Regex.isMatchAllPattern(names[0]))) {
names = new String[] { "**" };
}
- Set<String> resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names);
- for (String s : resolvedIndexAbstractions) {
+ Set<ResolvedExpression> resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names);
+ for (ResolvedExpression s : resolvedIndexAbstractions) {
enrichIndexAbstraction(clusterState, s, indices, aliases, dataStreams);
}
indices.sort(Comparator.comparing(ResolvedIndexAbstraction::getName));
@@ -597,12 +598,12 @@ private static void mergeResults(
private static void enrichIndexAbstraction(
ClusterState clusterState,
- String indexAbstraction,
+ ResolvedExpression indexAbstraction,
List<ResolvedIndex> indices,
List<ResolvedAlias> aliases,
List<ResolvedDataStream> dataStreams
) {
- IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction);
+ IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction.resource());
if (ia != null) {
switch (ia.getType()) {
case CONCRETE_INDEX -> {
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java
index 47abda4fabcde..347376a918d4c 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardResponse.java
@@ -69,7 +69,7 @@ public FieldUsageStats getStats() {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Fields.TRACKING_ID, trackingId);
- builder.timeField(Fields.TRACKING_STARTED_AT_MILLIS, Fields.TRACKING_STARTED_AT, trackingStartTime);
+ builder.timestampFieldsFromUnixEpochMillis(Fields.TRACKING_STARTED_AT_MILLIS, Fields.TRACKING_STARTED_AT, trackingStartTime);
builder.startObject(Fields.ROUTING)
.field(Fields.STATE, shardRouting.state())
.field(Fields.PRIMARY, shardRouting.primary())
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java
index ec8eb4babfdac..5e3799cd14518 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java
@@ -274,7 +274,7 @@ public static Template resolveTemplate(
Settings result = provider.getAdditionalIndexSettings(
indexName,
template.getDataStreamTemplate() != null ? indexName : null,
- template.getDataStreamTemplate() != null && metadata.isTimeSeriesTemplate(template),
+ metadata.retrieveIndexModeFromTemplate(template),
simulatedState.getMetadata(),
now,
templateSettings,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
index 4e9830fe0d14e..e01f364712676 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
@@ -21,6 +21,7 @@
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
@@ -133,7 +134,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener
@Override
protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) {
final ClusterState clusterState = clusterService.state();
- final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
+ final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
final AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases);
return new ShardValidateQueryRequest(shard.shardId(), aliasFilter, request);
}
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java
index f433e937dbe5d..a5a38a288d342 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java
@@ -18,12 +18,14 @@
import org.elasticsearch.xcontent.json.JsonXContent;
import java.io.IOException;
+import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Supplier;
+import static org.elasticsearch.common.xcontent.XContentElasticsearchExtension.DEFAULT_FORMATTER;
import static org.elasticsearch.ingest.CompoundProcessor.PIPELINE_ORIGIN_EXCEPTION_HEADER;
import static org.elasticsearch.ingest.CompoundProcessor.PROCESSOR_TAG_EXCEPTION_HEADER;
import static org.elasticsearch.ingest.CompoundProcessor.PROCESSOR_TYPE_EXCEPTION_HEADER;
@@ -84,7 +86,7 @@ private static XContentBuilder createSource(IndexRequest source, Exception excep
XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
{
- builder.timeField("@timestamp", timeSupplier.get());
+ builder.field("@timestamp", DEFAULT_FORMATTER.format(Instant.ofEpochMilli(timeSupplier.get())));
builder.startObject("document");
{
if (source.id() != null) {
diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java
index 2352628264394..94c294435acd3 100644
--- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java
+++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java
@@ -123,7 +123,7 @@ public XContentBuilder toXContent(
builder.field(MANAGED_BY_LIFECYCLE_FIELD.getPreferredName(), managedByLifecycle);
if (managedByLifecycle) {
if (indexCreationDate != null) {
- builder.timeField(
+ builder.timestampFieldsFromUnixEpochMillis(
INDEX_CREATION_DATE_MILLIS_FIELD.getPreferredName(),
INDEX_CREATION_DATE_FIELD.getPreferredName(),
indexCreationDate
@@ -134,7 +134,11 @@ public XContentBuilder toXContent(
);
}
if (rolloverDate != null) {
- builder.timeField(ROLLOVER_DATE_MILLIS_FIELD.getPreferredName(), ROLLOVER_DATE_FIELD.getPreferredName(), rolloverDate);
+ builder.timestampFieldsFromUnixEpochMillis(
+ ROLLOVER_DATE_MILLIS_FIELD.getPreferredName(),
+ ROLLOVER_DATE_FIELD.getPreferredName(),
+ rolloverDate
+ );
builder.field(TIME_SINCE_ROLLOVER_FIELD.getPreferredName(), getTimeSinceRollover(nowSupplier).toHumanReadableString(2));
}
if (generationDateMillis != null) {
diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java
index 9c82d032014f2..84c6df7b8a66f 100644
--- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java
+++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java
@@ -18,6 +18,7 @@
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.Writeable;
@@ -109,7 +110,7 @@ protected boolean resolveIndex(ExplainRequest request) {
@Override
protected void resolveRequest(ClusterState state, InternalRequest request) {
- final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index());
+ final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index());
final AliasFilter aliasFilter = searchService.buildAliasFilter(state, request.concreteIndex(), indicesAndAliases);
request.request().filteringAlias(aliasFilter);
}
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
index 223e72d6c901c..a5e2e59a45146 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
@@ -37,6 +37,7 @@
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
@@ -110,6 +111,7 @@
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.LongSupplier;
+import java.util.stream.Collectors;
import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH;
import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
@@ -203,7 +205,7 @@ public TransportSearchAction(
private Map<String, OriginalIndices> buildPerIndexOriginalIndices(
ClusterState clusterState,
- Set<String> indicesAndAliases,
+ Set<ResolvedExpression> indicesAndAliases,
String[] indices,
IndicesOptions indicesOptions
) {
@@ -211,6 +213,9 @@ private Map<String, OriginalIndices> buildPerIndexOriginalIndices(
var blocks = clusterState.blocks();
// optimization: mostly we do not have any blocks so there's no point in the expensive per-index checking
boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == false;
+ // Get the distinct set of index abstraction names present in the resolved expressions, to help with the reverse resolution
+ // from concrete index to the expression that produced it.
+ Set<String> indicesAndAliasesResources = indicesAndAliases.stream().map(ResolvedExpression::resource).collect(Collectors.toSet());
for (String index : indices) {
if (hasBlocks) {
blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, index);
@@ -227,8 +232,8 @@ private Map<String, OriginalIndices> buildPerIndexOriginalIndices(
String[] finalIndices = Strings.EMPTY_ARRAY;
if (aliases == null
|| aliases.length == 0
- || indicesAndAliases.contains(index)
- || hasDataStreamRef(clusterState, indicesAndAliases, index)) {
+ || indicesAndAliasesResources.contains(index)
+ || hasDataStreamRef(clusterState, indicesAndAliasesResources, index)) {
finalIndices = new String[] { index };
}
if (aliases != null) {
@@ -247,7 +252,11 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set<String> i
return indicesAndAliases.contains(ret.getParentDataStream().getName());
}
- Map<String, AliasFilter> buildIndexAliasFilters(ClusterState clusterState, Set<String> indicesAndAliases, Index[] concreteIndices) {
+ Map<String, AliasFilter> buildIndexAliasFilters(
+ ClusterState clusterState,
+ Set<ResolvedExpression> indicesAndAliases,
+ Index[] concreteIndices
+ ) {
final Map<String, AliasFilter> aliasFilterMap = new HashMap<>();
for (Index index : concreteIndices) {
clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName());
@@ -1237,7 +1246,10 @@ private void executeSearch(
} else {
final Index[] indices = resolvedIndices.getConcreteLocalIndices();
concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new);
- final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices());
+ final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(
+ clusterState,
+ searchRequest.indices()
+ );
aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices);
aliasFilter.putAll(remoteAliasMap);
localShardIterators = getLocalShardsIterator(
@@ -1810,7 +1822,7 @@ List<SearchShardIterator> getLocalShardsIterator(
ClusterState clusterState,
SearchRequest searchRequest,
String clusterAlias,
- Set<String> indicesAndAliases,
+ Set<ResolvedExpression> indicesAndAliases,
String[] concreteIndices
) {
var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices());
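The reverse-resolution step added to buildPerIndexOriginalIndices above reduces to projecting the resolved expressions onto their plain resource names. A self-contained sketch, with a local record standing in for IndexNameExpressionResolver.ResolvedExpression:

import java.util.Set;
import java.util.stream.Collectors;

class ResourceProjectionSketch {
    // Local stand-in for IndexNameExpressionResolver.ResolvedExpression.
    record ResolvedExpression(String resource) {}

    // Collapse resolved expressions to bare names so a concrete index can be matched
    // back to the alias, data stream, or index expression that produced it.
    static Set<String> resources(Set<ResolvedExpression> resolved) {
        return resolved.stream().map(ResolvedExpression::resource).collect(Collectors.toSet());
    }
}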
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java
index f418b5617b2a1..b94bd95c93d8a 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java
@@ -17,6 +17,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.index.Index;
@@ -127,7 +128,10 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act
searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, null),
listener.delegateFailureAndWrap((delegate, searchRequest) -> {
Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices();
- final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices());
+ final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(
+ clusterState,
+ searchRequest.indices()
+ );
final Map<String, AliasFilter> aliasFilters = transportSearchAction.buildIndexAliasFilters(
clusterState,
indicesAndAliases,
diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java b/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java
index cb98cd4b2f535..ac96a2d55bc71 100644
--- a/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java
+++ b/server/src/main/java/org/elasticsearch/cluster/ClusterSnapshotStats.java
@@ -228,7 +228,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.endObject();
builder.endObject();
- builder.timeField("oldest_start_time_millis", "oldest_start_time", firstStartTimeMillis);
+ builder.timestampFieldsFromUnixEpochMillis("oldest_start_time_millis", "oldest_start_time", firstStartTimeMillis);
return builder.endObject();
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java
index c371ff4d37a05..fe144135d42bd 100644
--- a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java
+++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java
@@ -180,7 +180,7 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignore
builder.value(snapshot.getName());
}
builder.endArray();
- builder.timeField("start_time_millis", "start_time", entry.startTime);
+ builder.timestampFieldsFromUnixEpochMillis("start_time_millis", "start_time", entry.startTime);
builder.field("repository_state_id", entry.repositoryStateId);
builder.field("state", entry.state);
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
index c32175fc9367d..d82a31720d6d4 100644
--- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
+++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
@@ -1404,7 +1404,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
}
}
builder.endArray();
- builder.timeField("start_time_millis", "start_time", startTime);
+ builder.timestampFieldsFromUnixEpochMillis("start_time_millis", "start_time", startTime);
builder.field("repository_state_id", repositoryStateId);
builder.startArray("shards");
{
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
index 783145d3618f1..320be8acb0af9 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
@@ -434,7 +434,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa
builder.startObject();
builder.field(INDEX_KEY);
index.toXContent(builder, params);
- builder.timeField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis);
+ builder.timestampFieldsFromUnixEpochMillis(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis);
return builder.endObject();
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index 2229166a2d779..eaf54034b22e0 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -74,6 +74,15 @@ public IndexNameExpressionResolver(ThreadContext threadContext, SystemIndices sy
this.systemIndices = Objects.requireNonNull(systemIndices, "System Indices must not be null");
}
+ /**
+ * This contains the resolved expression in the form of the resource.
+ * Soon it will facilitate the index component selector.
+ * @param resource the name of the resolved resource
+ */
+ public record ResolvedExpression(String resource) {
+
+ }
+
/**
* Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options
* are encapsulated in the specified request.
@@ -191,8 +200,9 @@ public List<String> dataStreamNames(ClusterState state, IndicesOptions options,
getSystemIndexAccessPredicate(),
getNetNewSystemIndexPredicate()
);
- final Collection<String> expressions = resolveExpressions(context, indexExpressions);
+ final Collection<ResolvedExpression> expressions = resolveExpressions(context, indexExpressions);
return expressions.stream()
+ .map(ResolvedExpression::resource)
.map(x -> state.metadata().getIndicesLookup().get(x))
.filter(Objects::nonNull)
.filter(ia -> ia.getType() == Type.DATA_STREAM)
@@ -221,10 +231,11 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit
getNetNewSystemIndexPredicate()
);
- final Collection<String> expressions = resolveExpressions(context, request.index());
+ final Collection<ResolvedExpression> expressions = resolveExpressions(context, request.index());
if (expressions.size() == 1) {
- IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next());
+ ResolvedExpression resolvedExpression = expressions.iterator().next();
+ IndexAbstraction ia = state.metadata().getIndicesLookup().get(resolvedExpression.resource());
if (ia.getType() == Type.ALIAS) {
Index writeIndex = ia.getWriteIndex();
if (writeIndex == null) {
@@ -246,14 +257,14 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit
}
}
- protected static Collection<String> resolveExpressions(Context context, String... expressions) {
+ protected static Collection<ResolvedExpression> resolveExpressions(Context context, String... expressions) {
if (context.getOptions().expandWildcardExpressions() == false) {
if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) {
return List.of();
} else {
return ExplicitResourceNameFilter.filterUnavailable(
context,
- DateMathExpressionResolver.resolve(context, List.of(expressions))
+ DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList())
);
}
} else {
@@ -264,7 +275,10 @@ protected static Collection<String> resolveExpressions(Context context, String..
} else {
return WildcardExpressionResolver.resolve(
context,
- ExplicitResourceNameFilter.filterUnavailable(context, DateMathExpressionResolver.resolve(context, List.of(expressions)))
+ ExplicitResourceNameFilter.filterUnavailable(
+ context,
+ DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList())
+ )
);
}
}
@@ -339,12 +353,12 @@ String[] concreteIndexNames(Context context, String... indexExpressions) {
}
Index[] concreteIndices(Context context, String... indexExpressions) {
- final Collection<String> expressions = resolveExpressions(context, indexExpressions);
+ final Collection<ResolvedExpression> expressions = resolveExpressions(context, indexExpressions);
final Set<Index> concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size());
final Map<String, IndexAbstraction> indicesLookup = context.getState().metadata().getIndicesLookup();
- for (String expression : expressions) {
- final IndexAbstraction indexAbstraction = indicesLookup.get(expression);
+ for (ResolvedExpression resolvedExpression : expressions) {
+ final IndexAbstraction indexAbstraction = indicesLookup.get(resolvedExpression.resource());
assert indexAbstraction != null;
if (indexAbstraction.getType() == Type.ALIAS && context.isResolveToWriteIndex()) {
Index writeIndex = indexAbstraction.getWriteIndex();
@@ -378,7 +392,7 @@ Index[] concreteIndices(Context context, String... indexExpressions) {
throw new IllegalArgumentException(
indexAbstraction.getType().getDisplayName()
+ " ["
- + expression
+ + resolvedExpression.resource()
+ "] has more than one index associated with it "
+ Arrays.toString(indexNames)
+ ", can't execute a single index op"
@@ -642,7 +656,7 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) {
* Utility method that allows to resolve an index expression to its corresponding single write index.
*
* @param state the cluster state containing all the data to resolve to expression to a concrete index
- * @param request The request that defines how the an alias or an index need to be resolved to a concrete index
+ * @param request The request that defines how an alias or an index need to be resolved to a concrete index
* and the expression that can be resolved to an alias or an index name.
* @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index
* @return the write index obtained as a result of the index resolution
@@ -734,7 +748,7 @@ public static String resolveDateMathExpression(String dateExpression, long time)
/**
* Resolve an array of expressions to the set of indices and aliases that these expressions match.
*/
- public Set<String> resolveExpressions(ClusterState state, String... expressions) {
+ public Set<ResolvedExpression> resolveExpressions(ClusterState state, String... expressions) {
return resolveExpressions(state, IndicesOptions.lenientExpandOpen(), false, expressions);
}
@@ -743,7 +757,7 @@ public Set<String> resolveExpressions(ClusterState state, String... expressions)
* If {@param preserveDataStreams} is {@code true}, datastreams that are covered by the wildcards from the
* {@param expressions} are returned as-is, without expanding them further to their respective backing indices.
*/
- public Set<String> resolveExpressions(
+ public Set<ResolvedExpression> resolveExpressions(
ClusterState state,
IndicesOptions indicesOptions,
boolean preserveDataStreams,
@@ -760,10 +774,10 @@ public Set<String> resolveExpressions(
getSystemIndexAccessPredicate(),
getNetNewSystemIndexPredicate()
);
- Collection<String> resolved = resolveExpressions(context, expressions);
- if (resolved instanceof Set<String>) {
+ Collection<ResolvedExpression> resolved = resolveExpressions(context, expressions);
+ if (resolved instanceof Set<ResolvedExpression>) {
// unmodifiable without creating a new collection as it might contain many items
- return Collections.unmodifiableSet((Set<String>) resolved);
+ return Collections.unmodifiableSet((Set<ResolvedExpression>) resolved);
} else {
return Set.copyOf(resolved);
}
@@ -776,7 +790,7 @@ public Set<String> resolveExpressions(
* the index itself - null is returned. Returns {@code null} if no filtering is required.
* NOTE : The provided expressions must have been resolved already via {@link #resolveExpressions}.
*/
- public String[] filteringAliases(ClusterState state, String index, Set<String> resolvedExpressions) {
+ public String[] filteringAliases(ClusterState state, String index, Set<ResolvedExpression> resolvedExpressions) {
return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions);
}
@@ -802,39 +816,39 @@ public String[] indexAliases(
Predicate<AliasMetadata> requiredAlias,
Predicate<DataStreamAlias> requiredDataStreamAlias,
boolean skipIdentity,
- Set<String> resolvedExpressions
+ Set<ResolvedExpression> resolvedExpressions
) {
- if (isAllIndices(resolvedExpressions)) {
+ if (isAllIndicesExpression(resolvedExpressions)) {
return null;
}
-
+ Set<String> resources = resolvedExpressions.stream().map(ResolvedExpression::resource).collect(Collectors.toSet());
final IndexMetadata indexMetadata = state.metadata().getIndices().get(index);
if (indexMetadata == null) {
// Shouldn't happen
throw new IndexNotFoundException(index);
}
- if (skipIdentity == false && resolvedExpressions.contains(index)) {
+ if (skipIdentity == false && resources.contains(index)) {
return null;
}
IndexAbstraction ia = state.metadata().getIndicesLookup().get(index);
DataStream dataStream = ia.getParentDataStream();
if (dataStream != null) {
- if (skipIdentity == false && resolvedExpressions.contains(dataStream.getName())) {
+ if (skipIdentity == false && resources.contains(dataStream.getName())) {
// skip the filters when the request targets the data stream name
return null;
}
Map<String, DataStreamAlias> dataStreamAliases = state.metadata().dataStreamAliases();
List<DataStreamAlias> aliasesForDataStream;
- if (iterateIndexAliases(dataStreamAliases.size(), resolvedExpressions.size())) {
+ if (iterateIndexAliases(dataStreamAliases.size(), resources.size())) {
aliasesForDataStream = dataStreamAliases.values()
.stream()
- .filter(dataStreamAlias -> resolvedExpressions.contains(dataStreamAlias.getName()))
+ .filter(dataStreamAlias -> resources.contains(dataStreamAlias.getName()))
.filter(dataStreamAlias -> dataStreamAlias.getDataStreams().contains(dataStream.getName()))
.toList();
} else {
- aliasesForDataStream = resolvedExpressions.stream()
+ aliasesForDataStream = resources.stream()
.map(dataStreamAliases::get)
.filter(dataStreamAlias -> dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dataStream.getName()))
.toList();
@@ -859,18 +873,15 @@ public String[] indexAliases(
} else {
final Map<String, AliasMetadata> indexAliases = indexMetadata.getAliases();
final AliasMetadata[] aliasCandidates;
- if (iterateIndexAliases(indexAliases.size(), resolvedExpressions.size())) {
+ if (iterateIndexAliases(indexAliases.size(), resources.size())) {
// faster to iterate indexAliases
aliasCandidates = indexAliases.values()
.stream()
- .filter(aliasMetadata -> resolvedExpressions.contains(aliasMetadata.alias()))
+ .filter(aliasMetadata -> resources.contains(aliasMetadata.alias()))
.toArray(AliasMetadata[]::new);
} else {
// faster to iterate resolvedExpressions
- aliasCandidates = resolvedExpressions.stream()
- .map(indexAliases::get)
- .filter(Objects::nonNull)
- .toArray(AliasMetadata[]::new);
+ aliasCandidates = resources.stream().map(indexAliases::get).filter(Objects::nonNull).toArray(AliasMetadata[]::new);
}
List<String> aliases = null;
for (AliasMetadata aliasMetadata : aliasCandidates) {
@@ -909,12 +920,7 @@ public Map<String, Set<String>> resolveSearchRouting(ClusterState state, @Nullab
getSystemIndexAccessPredicate(),
getNetNewSystemIndexPredicate()
);
- final Collection<String> resolvedExpressions = resolveExpressions(context, expressions);
-
- // TODO: it appears that this can never be true?
- if (isAllIndices(resolvedExpressions)) {
- return resolveSearchRoutingAllIndices(state.metadata(), routing);
- }
+ final Collection<ResolvedExpression> resolvedExpressions = resolveExpressions(context, expressions);
Map<String, Set<String>> routings = null;
Set<String> paramRouting = null;
@@ -924,8 +930,8 @@ public Map<String, Set<String>> resolveSearchRouting(ClusterState state, @Nullab
paramRouting = Sets.newHashSet(Strings.splitStringByCommaToArray(routing));
}
- for (String expression : resolvedExpressions) {
- IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression);
+ for (ResolvedExpression resolvedExpression : resolvedExpressions) {
+ IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(resolvedExpression.resource);
if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) {
for (Index index : indexAbstraction.getIndices()) {
String concreteIndex = index.getName();
@@ -963,7 +969,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab
}
} else {
// Index
- routings = collectRoutings(routings, paramRouting, norouting, expression);
+ routings = collectRoutings(routings, paramRouting, norouting, resolvedExpression.resource());
}
}
@@ -1009,6 +1015,17 @@ public static Map<String, Set<String>> resolveSearchRoutingAllIndices(Metadata m
return null;
}
+ /**
+ * Identifies whether the collection of resolved expressions given as argument refers to all indices.
+ * The empty or null collection identifies all indices.
+ *
+ * @param aliasesOrIndices the collection containing the resolved expressions
+ * @return true if the provided collection maps to all indices, false otherwise
+ */
+ public static boolean isAllIndicesExpression(Collection<ResolvedExpression> aliasesOrIndices) {
+ return isAllIndices(aliasesOrIndices.stream().map(ResolvedExpression::resource).toList());
+ }
+
/**
* Identifies whether the array containing index names given as argument refers to all indices
* The empty or null array identifies all indices
@@ -1249,8 +1266,8 @@ private WildcardExpressionResolver() {
* Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters.
* Depending on the context, returns the names of the datastreams themselves or their backing indices.
*/
- public static Collection<String> resolveAll(Context context) {
- List<String> concreteIndices = resolveEmptyOrTrivialWildcard(context);
+ public static Collection<ResolvedExpression> resolveAll(Context context) {
+ List<ResolvedExpression> concreteIndices = resolveEmptyOrTrivialWildcard(context);
if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) {
return concreteIndices;
@@ -1265,7 +1282,7 @@ public static Collection<String> resolveAll(Context context) {
.filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context))
.filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName()));
- Set<String> resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet());
+ Set<ResolvedExpression> resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet());
resolved.addAll(concreteIndices);
return resolved;
}
@@ -1293,17 +1310,17 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres
* ultimately returned, instead of the alias or datastream name
*
*/
- public static Collection<String> resolve(Context context, List<String> expressions) {
+ public static Collection<ResolvedExpression> resolve(Context context, List<ResolvedExpression> expressions) {
ExpressionList expressionList = new ExpressionList(context, expressions);
// fast exit if there are no wildcards to evaluate
if (expressionList.hasWildcard() == false) {
return expressions;
}
- Set<String> result = new HashSet<>();
+ Set<ResolvedExpression> result = new HashSet<>();
for (ExpressionList.Expression expression : expressionList) {
if (expression.isWildcard()) {
Stream<IndexAbstraction> matchingResources = matchResourcesToWildcard(context, expression.get());
- Stream<String> matchingOpenClosedNames = expandToOpenClosed(context, matchingResources);
+ Stream<ResolvedExpression> matchingOpenClosedNames = expandToOpenClosed(context, matchingResources);
AtomicBoolean emptyWildcardExpansion = new AtomicBoolean(false);
if (context.getOptions().allowNoIndices() == false) {
emptyWildcardExpansion.set(true);
@@ -1319,9 +1336,9 @@ public static Collection<String> resolve(Context context, List<String> expressio
}
} else {
if (expression.isExclusion()) {
- result.remove(expression.get());
+ result.remove(new ResolvedExpression(expression.get()));
} else {
- result.add(expression.get());
+ result.add(expression.resolvedExpression());
}
}
}
@@ -1412,13 +1429,13 @@ private static Map<String, IndexAbstraction> filterIndicesLookupForSuffixWildcar
* Data streams and aliases are interpreted to refer to multiple indices,
* then all index resources are filtered by their open/closed status.
*/
- private static Stream<String> expandToOpenClosed(Context context, Stream<IndexAbstraction> resources) {
+ private static Stream<ResolvedExpression> expandToOpenClosed(Context context, Stream<IndexAbstraction> resources) {
final IndexMetadata.State excludeState = excludeState(context.getOptions());
return resources.flatMap(indexAbstraction -> {
if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) {
- return Stream.of(indexAbstraction.getName());
+ return Stream.of(new ResolvedExpression(indexAbstraction.getName()));
} else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) {
- return Stream.of(indexAbstraction.getName());
+ return Stream.of(new ResolvedExpression(indexAbstraction.getName()));
} else {
Stream indicesStateStream = Stream.of();
if (shouldIncludeRegularIndices(context.getOptions())) {
@@ -1434,18 +1451,20 @@ private static Stream<String> expandToOpenClosed(Context context, Stream<IndexAb
indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState);
}
- return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName());
+ return indicesStateStream.map(indexMeta -> new ResolvedExpression(indexMeta.getIndex().getName()));
}
});
}
- private static List<String> resolveEmptyOrTrivialWildcard(Context context) {
+ private static List<ResolvedExpression> resolveEmptyOrTrivialWildcard(Context context) {
final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), context.getState().metadata());
+ Stream<String> result;
if (context.systemIndexAccessLevel == SystemIndexAccessLevel.ALL) {
- return List.of(allIndices);
+ result = Arrays.stream(allIndices);
} else {
- return resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices);
+ result = resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices).stream();
}
+ return result.map(ResolvedExpression::new).toList();
}
private static List<String> resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) {
@@ -1507,8 +1526,8 @@ private DateMathExpressionResolver() {
// utility class
}
- public static List<String> resolve(Context context, List<String> expressions) {
- List<String> result = new ArrayList<>(expressions.size());
+ public static List<ResolvedExpression> resolve(Context context, List<ResolvedExpression> expressions) {
+ List<ResolvedExpression> result = new ArrayList<>(expressions.size());
for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) {
result.add(resolveExpression(expression, context::getStartTime));
}
@@ -1519,13 +1538,15 @@ static String resolveExpression(String expression) {
return resolveExpression(expression, System::currentTimeMillis);
}
- static String resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) {
+ static ResolvedExpression resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) {
+ String result;
if (expression.isExclusion()) {
// accepts date-math exclusions that are of the form "-<...{}>", i.e. the "-" is outside the "<>" date-math template
- return "-" + resolveExpression(expression.get(), getTime);
+ result = "-" + resolveExpression(expression.get(), getTime);
} else {
- return resolveExpression(expression.get(), getTime);
+ result = resolveExpression(expression.get(), getTime);
}
+ return new ResolvedExpression(result);
}
static String resolveExpression(String expression, LongSupplier getTime) {
@@ -1687,25 +1708,26 @@ private ExplicitResourceNameFilter() {
* Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out.
* Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in.
*/
- public static List<String> filterUnavailable(Context context, List<String> expressions) {
+ public static List<ResolvedExpression> filterUnavailable(Context context, List<ResolvedExpression> expressions) {
ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions);
- List<String> result = new ArrayList<>(expressions.size());
+ List<ResolvedExpression> result = new ArrayList<>(expressions.size());
for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) {
validateAliasOrIndex(expression);
- if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression.get())) {
- result.add(expression.expression());
+ if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression)) {
+ result.add(expression.resolvedExpression());
}
}
return result;
}
/**
- * This returns `true` if the given {@param name} is of a resource that exists.
- * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of
+ * This returns `true` if the given {@param resolvedExpression} is of a resource that exists.
+ * Otherwise, it returns `false` if the `ignore_unavailable` option is `true`, or, if `false`, it throws a "not found" type of
* exception.
*/
@Nullable
- private static boolean ensureAliasOrIndexExists(Context context, String name) {
+ private static boolean ensureAliasOrIndexExists(Context context, ExpressionList.Expression expression) {
+ String name = expression.get();
boolean ignoreUnavailable = context.getOptions().ignoreUnavailable();
IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name);
if (indexAbstraction == null) {
@@ -1737,32 +1759,37 @@ private static boolean ensureAliasOrIndexExists(Context context, String name) {
}
private static void validateAliasOrIndex(ExpressionList.Expression expression) {
- if (Strings.isEmpty(expression.expression())) {
- throw notFoundException(expression.expression());
+ if (Strings.isEmpty(expression.resolvedExpression().resource())) {
+ throw notFoundException(expression.get());
}
// Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API
// does not exist and the path is interpreted as an expression. If the expression begins with an underscore,
// throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown
// if the expression can't be found.
- if (expression.expression().charAt(0) == '_') {
- throw new InvalidIndexNameException(expression.expression(), "must not start with '_'.");
+ if (expression.resolvedExpression().resource().charAt(0) == '_') {
+ throw new InvalidIndexNameException(expression.get(), "must not start with '_'.");
}
}
- private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List<String> indexExpressions) {
+ private static void ensureRemoteIndicesRequireIgnoreUnavailable(
+ IndicesOptions options,
+ List<ResolvedExpression> resolvedExpressions
+ ) {
if (options.ignoreUnavailable()) {
return;
}
- for (String index : indexExpressions) {
+ for (ResolvedExpression resolvedExpression : resolvedExpressions) {
+ var index = resolvedExpression.resource();
if (RemoteClusterAware.isRemoteIndexName(index)) {
- failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions);
+ failOnRemoteIndicesNotIgnoringUnavailable(resolvedExpressions);
}
}
}
- private static void failOnRemoteIndicesNotIgnoringUnavailable(List<String> indexExpressions) {
+ private static void failOnRemoteIndicesNotIgnoringUnavailable(List<ResolvedExpression> resolvedExpressions) {
List<String> crossClusterIndices = new ArrayList<>();
- for (String index : indexExpressions) {
+ for (ResolvedExpression resolvedExpression : resolvedExpressions) {
+ String index = resolvedExpression.resource();
if (RemoteClusterAware.isRemoteIndexName(index)) {
crossClusterIndices.add(index);
}
@@ -1780,13 +1807,13 @@ public static final class ExpressionList implements Iterable<ExpressionList.Expr
private final List<Expression> expressionsList;
private final boolean hasWildcard;
- public record Expression(String expression, boolean isWildcard, boolean isExclusion) {
+ public record Expression(ResolvedExpression resolvedExpression, boolean isWildcard, boolean isExclusion) {
public String get() {
if (isExclusion()) {
// drop the leading "-" if exclusion because it is easier for callers to handle it like this
- return expression().substring(1);
+ return resolvedExpression().resource().substring(1);
} else {
- return expression();
+ return resolvedExpression().resource();
}
}
}
@@ -1795,16 +1822,17 @@ public String get() {
* Creates the expression iterable that can be used to easily check which expression item is a wildcard or an exclusion (or both).
* The {@param context} is used to check if wildcards ought to be considered or not.
*/
- public ExpressionList(Context context, List<String> expressionStrings) {
- List<Expression> expressionsList = new ArrayList<>(expressionStrings.size());
+ public ExpressionList(Context context, List<ResolvedExpression> resolvedExpressions) {
+ List<Expression> expressionsList = new ArrayList<>(resolvedExpressions.size());
boolean wildcardSeen = false;
- for (String expressionString : expressionStrings) {
+ for (ResolvedExpression resolvedExpression : resolvedExpressions) {
+ var expressionString = resolvedExpression.resource();
boolean isExclusion = expressionString.startsWith("-") && wildcardSeen;
if (context.getOptions().expandWildcardExpressions() && isWildcard(expressionString)) {
wildcardSeen = true;
- expressionsList.add(new Expression(expressionString, true, isExclusion));
+ expressionsList.add(new Expression(resolvedExpression, true, isExclusion));
} else {
- expressionsList.add(new Expression(expressionString, false, isExclusion));
+ expressionsList.add(new Expression(resolvedExpression, false, isExclusion));
}
}
this.expressionsList = expressionsList;
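To recap the ExpressionList change at the end of this file: Expression now wraps a ResolvedExpression, and get() still strips the leading "-" from exclusions. A compact standalone sketch of that behavior:

class ExpressionSketch {
    record ResolvedExpression(String resource) {}

    record Expression(ResolvedExpression resolvedExpression, boolean isWildcard, boolean isExclusion) {
        String get() {
            // Drop the leading "-" for exclusions so callers see the bare name.
            var resource = resolvedExpression().resource();
            return isExclusion() ? resource.substring(1) : resource;
        }
    }

    public static void main(String[] args) {
        var exclusion = new Expression(new ResolvedExpression("-logs-*"), true, true);
        System.out.println(exclusion.get()); // prints "logs-*"
    }
}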
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
index 3a390a64bb993..7ddacc26eddec 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
@@ -1310,23 +1310,6 @@ public Map<String, ComposableIndexTemplate> templatesV2() {
.orElse(Collections.emptyMap());
}
- // TODO: remove this method:
- public boolean isTimeSeriesTemplate(ComposableIndexTemplate indexTemplate) {
- var indexModeFromTemplate = retrieveIndexModeFromTemplate(indexTemplate);
- if (indexModeFromTemplate == IndexMode.TIME_SERIES) {
- // No need to check for the existence of index.routing_path here, because index.mode=time_series can't be specified without it.
- // Setting validation takes care of this.
- // Also no need to validate that the fields defined in index.routing_path are keyword fields with time_series_dimension
- // attribute enabled. This is validated elsewhere (DocumentMapper).
- return true;
- }
-
- // in a followup change: check the existence of keyword fields of type keyword and time_series_dimension attribute enabled in
- // the template. In this case the index.routing_path setting can be generated from the mapping.
-
- return false;
- }
-
public IndexMode retrieveIndexModeFromTemplate(ComposableIndexTemplate indexTemplate) {
if (indexTemplate.getDataStreamTemplate() == null) {
return null;
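With isTimeSeriesTemplate removed, call sites that only care about time series compare the retrieved mode directly. A sketch of the equivalence, using a local enum as a stand-in for org.elasticsearch.index.IndexMode:

class TemplateModeSketch {
    // Stand-in for org.elasticsearch.index.IndexMode; the real enum may have more members.
    enum IndexMode { STANDARD, TIME_SERIES, LOGSDB }

    // retrieveIndexModeFromTemplate returns null when the template creates no data stream,
    // so the old boolean check becomes an equality test on the returned mode.
    static boolean isTimeSeries(IndexMode templateIndexMode) {
        return templateIndexMode == IndexMode.TIME_SERIES;
    }
}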
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
index 1cebbabde0769..7f2c076281735 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
@@ -982,10 +982,10 @@ static Settings aggregateIndexSettings(
if (sourceMetadata == null) {
final Settings templateAndRequestSettings = Settings.builder().put(combinedTemplateSettings).put(request.settings()).build();
- final boolean timeSeriesTemplate = Optional.of(request)
+ final IndexMode templateIndexMode = Optional.of(request)
.map(CreateIndexClusterStateUpdateRequest::matchingTemplate)
- .map(metadata::isTimeSeriesTemplate)
- .orElse(false);
+ .map(metadata::retrieveIndexModeFromTemplate)
+ .orElse(null);
// Loop through all the explicit index setting providers, adding them to the
// additionalIndexSettings map
@@ -995,7 +995,7 @@ static Settings aggregateIndexSettings(
var newAdditionalSettings = provider.getAdditionalIndexSettings(
request.index(),
request.dataStreamName(),
- timeSeriesTemplate,
+ templateIndexMode,
currentState.getMetadata(),
resolvedAt,
templateAndRequestSettings,
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
index 57194ded9422e..ccdfaa5518aee 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
@@ -705,7 +705,7 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT
var newAdditionalSettings = provider.getAdditionalIndexSettings(
"validate-index-name",
indexTemplate.getDataStreamTemplate() != null ? "validate-data-stream-name" : null,
- indexTemplate.getDataStreamTemplate() != null && metadata.isTimeSeriesTemplate(indexTemplate),
+ metadata.retrieveIndexModeFromTemplate(indexTemplate),
currentState.getMetadata(),
now,
combinedSettings,
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java
index 4257543498c54..aa8b092ffcca0 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java
@@ -266,7 +266,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(NODE_ID_FIELD.getPreferredName(), nodeId);
builder.field(TYPE_FIELD.getPreferredName(), type);
builder.field(REASON_FIELD.getPreferredName(), reason);
- builder.timeField(STARTED_AT_MILLIS_FIELD.getPreferredName(), STARTED_AT_READABLE_FIELD, startedAtMillis);
+ builder.timestampFieldsFromUnixEpochMillis(
+ STARTED_AT_MILLIS_FIELD.getPreferredName(),
+ STARTED_AT_READABLE_FIELD,
+ startedAtMillis
+ );
builder.field(NODE_SEEN_FIELD.getPreferredName(), nodeSeen);
if (allocationDelay != null) {
builder.field(ALLOCATION_DELAY_FIELD.getPreferredName(), allocationDelay.getStringRep());
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java
index 767520d34058c..9970088ec4c33 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java
@@ -21,6 +21,7 @@
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.Nullable;
+import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettingProvider;
import org.elasticsearch.snapshots.SearchableSnapshotsSettings;
@@ -226,7 +227,7 @@ public static class DefaultHotAllocationSettingProvider implements IndexSettingP
public Settings getAdditionalIndexSettings(
String indexName,
@Nullable String dataStreamName,
- boolean isTimeSeries,
+ IndexMode templateIndexMode,
Metadata metadata,
Instant resolvedAt,
Settings indexTemplateAndCreateRequestSettings,
diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
index 0258fdc77eadf..5a616482c51c0 100644
--- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
+++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
@@ -35,6 +35,7 @@
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.FsDirectoryFactory;
import org.elasticsearch.index.store.Store;
@@ -187,6 +188,8 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING,
IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING,
IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING,
+ IndexSettings.SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING,
+ SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING,
// validate that built-in similarities don't get redefined
Setting.groupSetting("index.similarity.", (s) -> {
diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java
index dea851b1b553a..0298e1a123b58 100644
--- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java
+++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java
@@ -57,13 +57,13 @@ public Map<Class<?>, XContentBuilder.Writer> getXContentWriters() {
// Fully-qualified here to reduce ambiguity around our (ES') Version class
writers.put(org.apache.lucene.util.Version.class, (b, v) -> b.value(Objects.toString(v)));
writers.put(TimeValue.class, (b, v) -> b.value(v.toString()));
- writers.put(ZonedDateTime.class, XContentBuilder::timeValue);
- writers.put(OffsetDateTime.class, XContentBuilder::timeValue);
- writers.put(OffsetTime.class, XContentBuilder::timeValue);
- writers.put(java.time.Instant.class, XContentBuilder::timeValue);
- writers.put(LocalDateTime.class, XContentBuilder::timeValue);
- writers.put(LocalDate.class, XContentBuilder::timeValue);
- writers.put(LocalTime.class, XContentBuilder::timeValue);
+ writers.put(ZonedDateTime.class, XContentBuilder::timestampValue);
+ writers.put(OffsetDateTime.class, XContentBuilder::timestampValue);
+ writers.put(OffsetTime.class, XContentBuilder::timestampValue);
+ writers.put(java.time.Instant.class, XContentBuilder::timestampValue);
+ writers.put(LocalDateTime.class, XContentBuilder::timestampValue);
+ writers.put(LocalDate.class, XContentBuilder::timestampValue);
+ writers.put(LocalTime.class, XContentBuilder::timestampValue);
writers.put(DayOfWeek.class, (b, v) -> b.value(v.toString()));
writers.put(Month.class, (b, v) -> b.value(v.toString()));
writers.put(MonthDay.class, (b, v) -> b.value(v.toString()));
@@ -103,10 +103,8 @@ public Map<Class<?>, XContentBuilder.HumanReadableTransformer> getXContentHumanR
public Map<Class<?>, Function<Object, Object>> getDateTransformers() {
Map<Class<?>, Function<Object, Object>> transformers = new HashMap<>();
transformers.put(Date.class, d -> DEFAULT_FORMATTER.format(((Date) d).toInstant()));
- transformers.put(Long.class, d -> DEFAULT_FORMATTER.format(Instant.ofEpochMilli((long) d)));
transformers.put(Calendar.class, d -> DEFAULT_FORMATTER.format(((Calendar) d).toInstant()));
transformers.put(GregorianCalendar.class, d -> DEFAULT_FORMATTER.format(((Calendar) d).toInstant()));
- transformers.put(Instant.class, d -> DEFAULT_FORMATTER.format((Instant) d));
transformers.put(ZonedDateTime.class, d -> DEFAULT_FORMATTER.format((ZonedDateTime) d));
transformers.put(OffsetDateTime.class, d -> DEFAULT_FORMATTER.format((OffsetDateTime) d));
transformers.put(OffsetTime.class, d -> OFFSET_TIME_FORMATTER.format((OffsetTime) d));
@@ -119,4 +117,9 @@ public Map, Function> getDateTransformers() {
transformers.put(LocalTime.class, d -> LOCAL_TIME_FORMATTER.format((LocalTime) d));
return transformers;
}
+
+ @Override
+ public String formatUnixEpochMillis(long unixEpochMillis) {
+ return DEFAULT_FORMATTER.format(Instant.ofEpochMilli(unixEpochMillis));
+ }
}
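Dropping the Long date transformer above works because epoch millis now flow through the dedicated formatUnixEpochMillis hook (Instant values route through the timestampValue writers instead). A standalone sketch of the override, again with ISO_INSTANT assumed as a stand-in for DEFAULT_FORMATTER:

import java.time.Instant;
import java.time.format.DateTimeFormatter;

class EpochMillisExtensionSketch {
    // Assumed stand-in for the extension's DEFAULT_FORMATTER.
    private static final DateTimeFormatter DEFAULT_FORMATTER = DateTimeFormatter.ISO_INSTANT;

    // Mirrors the override added above: format a raw epoch-millis long as a timestamp string.
    public String formatUnixEpochMillis(long unixEpochMillis) {
        return DEFAULT_FORMATTER.format(Instant.ofEpochMilli(unixEpochMillis));
    }
}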
diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java
index aaa4c738c0e13..0180d2c8df119 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java
@@ -30,20 +30,21 @@ public interface IndexSettingProvider {
* Returns explicitly set default index {@link Settings} for the given index. This should not
* return null.
*
- * @param indexName The name of the new index being created
- * @param dataStreamName The name of the data stream if the index being created is part of a data stream otherwise
- * null
- * @param isTimeSeries Whether the template is in time series mode.
- * @param metadata The current metadata instance that doesn't yet contain the index to be created
- * @param resolvedAt The time the request to create this new index was accepted.
- * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings
- * defined on the create index request
- * @param combinedTemplateMappings All the mappings resolved from the template that matches
+ * @param indexName The name of the new index being created
+ * @param dataStreamName The name of the data stream if the index being created is part of a data stream
+ * otherwise null
+ * @param templateIndexMode The index mode defined in template if template creates data streams,
+ * otherwise null is returned.
+ * @param metadata The current metadata instance that doesn't yet contain the index to be created
+ * @param resolvedAt The time the request to create this new index was accepted.
+ * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings
+ * defined on the create index request
+ * @param combinedTemplateMappings All the mappings resolved from the template that matches
*/
Settings getAdditionalIndexSettings(
String indexName,
@Nullable String dataStreamName,
- boolean isTimeSeries,
+ @Nullable IndexMode templateIndexMode,
Metadata metadata,
Instant resolvedAt,
Settings indexTemplateAndCreateRequestSettings,
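For downstream providers, the interface change swaps the boolean flag for the template's index mode. A simplified sketch of the updated contract and an implementation against it; the single-string return value is an illustration only, the real method returns Settings:

import java.time.Instant;

class SettingProviderSketch {
    // Stand-in for org.elasticsearch.index.IndexMode.
    enum IndexMode { STANDARD, TIME_SERIES, LOGSDB }

    // Simplified shape of the updated contract: templateIndexMode is null when the
    // matched template does not create a data stream.
    interface IndexSettingProvider {
        String getAdditionalIndexSettings(String indexName, String dataStreamName, IndexMode templateIndexMode, Instant resolvedAt);
    }

    // Hypothetical provider: contributes a routing-path default only for time-series templates.
    static final IndexSettingProvider EXAMPLE = (indexName, dataStreamName, mode, resolvedAt) ->
        mode == IndexMode.TIME_SERIES ? "index.routing_path=[host.name]" : "";
}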
diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java
index c97ba3953a58d..b15c319b5462b 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java
@@ -28,6 +28,7 @@
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.ingest.IngestService;
import org.elasticsearch.node.Node;
@@ -654,6 +655,13 @@ public Iterator<Setting<?>> settings() {
Property.Final
);
+ public static final Setting<Boolean> SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING = Setting.boolSetting(
+ "index.synthetic_source.enable_second_doc_parsing_pass",
+ true,
+ Property.IndexScope,
+ Property.Dynamic
+ );
+
/**
* Returns <code>true</code> if TSDB encoding is enabled. The default is <code>true</code>.
*/
@@ -807,6 +815,8 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) {
private volatile long mappingDimensionFieldsLimit;
private volatile boolean skipIgnoredSourceWrite;
private volatile boolean skipIgnoredSourceRead;
+ private volatile boolean syntheticSourceSecondDocParsingPassEnabled;
+ private final SourceFieldMapper.Mode indexMappingSourceMode;
/**
* The maximum number of refresh listeners allows on this shard.
@@ -967,6 +977,8 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
es87TSDBCodecEnabled = scopedSettings.get(TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING);
skipIgnoredSourceWrite = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING);
skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING);
+ syntheticSourceSecondDocParsingPassEnabled = scopedSettings.get(SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING);
+ indexMappingSourceMode = scopedSettings.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING);
scopedSettings.addSettingsUpdateConsumer(
MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING,
@@ -1054,6 +1066,10 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
this::setSkipIgnoredSourceWrite
);
scopedSettings.addSettingsUpdateConsumer(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, this::setSkipIgnoredSourceRead);
+ scopedSettings.addSettingsUpdateConsumer(
+ SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING,
+ this::setSyntheticSourceSecondDocParsingPassEnabled
+ );
}
private void setSearchIdleAfter(TimeValue searchIdleAfter) {
@@ -1646,6 +1662,18 @@ private void setSkipIgnoredSourceRead(boolean value) {
this.skipIgnoredSourceRead = value;
}
+ private void setSyntheticSourceSecondDocParsingPassEnabled(boolean syntheticSourceSecondDocParsingPassEnabled) {
+ this.syntheticSourceSecondDocParsingPassEnabled = syntheticSourceSecondDocParsingPassEnabled;
+ }
+
+ public boolean isSyntheticSourceSecondDocParsingPassEnabled() {
+ return syntheticSourceSecondDocParsingPassEnabled;
+ }
+
+ public SourceFieldMapper.Mode getIndexMappingSourceMode() {
+ return indexMappingSourceMode;
+ }
+
/**
* The bounds for {@code @timestamp} on this index or
* {@code null} if there are no bounds.
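The new index.synthetic_source.enable_second_doc_parsing_pass setting follows the usual dynamic-setting pattern: read the initial value, keep it in a volatile field, and install an update consumer. A minimal sketch of that pattern outside the Elasticsearch codebase:

class DynamicSettingSketch {
    // Matches the setting's default of true; volatile because updates arrive from another thread.
    private volatile boolean secondDocParsingPassEnabled = true;

    // Plays the role of the consumer passed to scopedSettings.addSettingsUpdateConsumer(...):
    // the framework invokes it with the new value whenever the index setting changes.
    void onSettingUpdate(boolean newValue) {
        this.secondDocParsingPassEnabled = newValue;
    }

    boolean isSyntheticSourceSecondDocParsingPassEnabled() {
        return secondDocParsingPassEnabled;
    }
}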
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java
index 4313aa40cf13e..e6ca1eb014ffe 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java
@@ -41,6 +41,7 @@
import java.io.IOException;
import static org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat.DYNAMIC_CONFIDENCE_INTERVAL;
+import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAX_DIMS_COUNT;
public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat {
@@ -289,4 +290,9 @@ public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, Ra
return delegate.getRandomVectorScorer(sim, values, query);
}
}
+
+ @Override
+ public int getMaxDimensions(String fieldName) {
+ return MAX_DIMS_COUNT;
+ }
}
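Both vector-format changes in this diff override Lucene's per-format dimension limit the same way, reporting Elasticsearch's own cap. A tiny sketch, assuming MAX_DIMS_COUNT mirrors DenseVectorFieldMapper.MAX_DIMS_COUNT (4096 at the time of writing):

class MaxDimensionsSketch {
    // Assumption: mirrors DenseVectorFieldMapper.MAX_DIMS_COUNT.
    static final int MAX_DIMS_COUNT = 4096;

    // Same shape as the getMaxDimensions overrides added to both vector formats.
    public int getMaxDimensions(String fieldName) {
        return MAX_DIMS_COUNT;
    }
}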
diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java
index f1ae4e3fdeded..eda1596b89597 100644
--- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java
+++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java
@@ -25,6 +25,8 @@
import java.io.IOException;
+import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAX_DIMS_COUNT;
+
class ES815BitFlatVectorsFormat extends FlatVectorsFormat {
private final FlatVectorsFormat delegate = new Lucene99FlatVectorsFormat(FlatBitVectorScorer.INSTANCE);
@@ -43,6 +45,11 @@ public FlatVectorsReader fieldsReader(SegmentReadState segmentReadState) throws
return delegate.fieldsReader(segmentReadState);
}
+ @Override
+ public int getMaxDimensions(String fieldName) {
+ return MAX_DIMS_COUNT;
+ }
+
static class FlatBitVectorScorer implements FlatVectorsScorer {
static final FlatBitVectorScorer INSTANCE = new FlatBitVectorScorer();
diff --git a/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
index d321600e03bf9..90f8e6adab73d 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
@@ -15,11 +15,7 @@
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.util.SameThreadExecutorService;
import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.metrics.CounterMetric;
-import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.IndexSettings;
@@ -29,8 +25,6 @@
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
-import java.util.Collections;
-import java.util.Locale;
import java.util.Set;
import java.util.concurrent.Executor;
@@ -38,23 +32,13 @@
* An extension to the {@link ConcurrentMergeScheduler} that provides tracking on merge times, total
* and current merges.
*/
-class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
+public class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler implements ElasticsearchMergeScheduler {
protected final Logger logger;
private final Settings indexSettings;
private final ShardId shardId;
- private final MeanMetric totalMerges = new MeanMetric();
- private final CounterMetric totalMergesNumDocs = new CounterMetric();
- private final CounterMetric totalMergesSizeInBytes = new CounterMetric();
- private final CounterMetric currentMerges = new CounterMetric();
- private final CounterMetric currentMergesNumDocs = new CounterMetric();
- private final CounterMetric currentMergesSizeInBytes = new CounterMetric();
- private final CounterMetric totalMergeStoppedTime = new CounterMetric();
- private final CounterMetric totalMergeThrottledTime = new CounterMetric();
-
- private final Set<OnGoingMerge> onGoingMerges = ConcurrentCollections.newConcurrentSet();
- private final Set<OnGoingMerge> readOnlyOnGoingMerges = Collections.unmodifiableSet(onGoingMerges);
+ private final MergeTracking mergeTracking;
private final MergeSchedulerConfig config;
private final SameThreadExecutorService sameThreadExecutorService = new SameThreadExecutorService();
@@ -63,11 +47,16 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
this.shardId = shardId;
this.indexSettings = indexSettings.getSettings();
this.logger = Loggers.getLogger(getClass(), shardId);
+ this.mergeTracking = new MergeTracking(
+ logger,
+ () -> indexSettings.getMergeSchedulerConfig().isAutoThrottle() ? getIORateLimitMBPerSec() : Double.POSITIVE_INFINITY
+ );
refreshConfig();
}
+ @Override
public Set<OnGoingMerge> onGoingMerges() {
- return readOnlyOnGoingMerges;
+ return mergeTracking.onGoingMerges();
}
/** We're currently only interested in messages with this prefix. */
@@ -104,74 +93,21 @@ protected void message(String message) {
super.message(message);
}
- private static String getSegmentName(MergePolicy.OneMerge merge) {
- return merge.getMergeInfo() != null ? merge.getMergeInfo().info.name : "_na_";
- }
-
@Override
protected void doMerge(MergeSource mergeSource, MergePolicy.OneMerge merge) throws IOException {
- int totalNumDocs = merge.totalNumDocs();
- long totalSizeInBytes = merge.totalBytesSize();
long timeNS = System.nanoTime();
- currentMerges.inc();
- currentMergesNumDocs.inc(totalNumDocs);
- currentMergesSizeInBytes.inc(totalSizeInBytes);
-
OnGoingMerge onGoingMerge = new OnGoingMerge(merge);
- onGoingMerges.add(onGoingMerge);
-
- if (logger.isTraceEnabled()) {
- logger.trace(
- "merge [{}] starting..., merging [{}] segments, [{}] docs, [{}] size, into [{}] estimated_size",
- getSegmentName(merge),
- merge.segments.size(),
- totalNumDocs,
- ByteSizeValue.ofBytes(totalSizeInBytes),
- ByteSizeValue.ofBytes(merge.estimatedMergeBytes)
- );
- }
+ mergeTracking.mergeStarted(onGoingMerge);
try {
beforeMerge(onGoingMerge);
super.doMerge(mergeSource, merge);
} finally {
long tookMS = TimeValue.nsecToMSec(System.nanoTime() - timeNS);
+ mergeTracking.mergeFinished(merge, onGoingMerge, tookMS);
- onGoingMerges.remove(onGoingMerge);
afterMerge(onGoingMerge);
-
- currentMerges.dec();
- currentMergesNumDocs.dec(totalNumDocs);
- currentMergesSizeInBytes.dec(totalSizeInBytes);
-
- totalMergesNumDocs.inc(totalNumDocs);
- totalMergesSizeInBytes.inc(totalSizeInBytes);
- totalMerges.inc(tookMS);
- long stoppedMS = TimeValue.nsecToMSec(
- merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.STOPPED)
- );
- long throttledMS = TimeValue.nsecToMSec(
- merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.PAUSED)
- );
- totalMergeStoppedTime.inc(stoppedMS);
- totalMergeThrottledTime.inc(throttledMS);
-
- String message = String.format(
- Locale.ROOT,
- "merge segment [%s] done: took [%s], [%,.1f MB], [%,d docs], [%s stopped], [%s throttled]",
- getSegmentName(merge),
- TimeValue.timeValueMillis(tookMS),
- totalSizeInBytes / 1024f / 1024f,
- totalNumDocs,
- TimeValue.timeValueMillis(stoppedMS),
- TimeValue.timeValueMillis(throttledMS)
- );
-
- if (tookMS > 20000) { // if more than 20 seconds, DEBUG log it
- logger.debug("{}", message);
- } else if (logger.isTraceEnabled()) {
- logger.trace("{}", message);
- }
}
+
}
/**
@@ -206,24 +142,13 @@ protected MergeThread getMergeThread(MergeSource mergeSource, MergePolicy.OneMer
return thread;
}
- MergeStats stats() {
- final MergeStats mergeStats = new MergeStats();
- mergeStats.add(
- totalMerges.count(),
- totalMerges.sum(),
- totalMergesNumDocs.count(),
- totalMergesSizeInBytes.count(),
- currentMerges.count(),
- currentMergesNumDocs.count(),
- currentMergesSizeInBytes.count(),
- totalMergeStoppedTime.count(),
- totalMergeThrottledTime.count(),
- config.isAutoThrottle() ? getIORateLimitMBPerSec() : Double.POSITIVE_INFINITY
- );
- return mergeStats;
+ @Override
+ public MergeStats stats() {
+ return mergeTracking.stats();
}
- void refreshConfig() {
+ @Override
+ public void refreshConfig() {
if (this.getMaxMergeCount() != config.getMaxMergeCount() || this.getMaxThreadCount() != config.getMaxThreadCount()) {
this.setMaxMergesAndThreads(config.getMaxMergeCount(), config.getMaxThreadCount());
}
@@ -234,4 +159,9 @@ void refreshConfig() {
disableAutoIOThrottle();
}
}
+
+ @Override
+ public MergeScheduler getMergeScheduler() {
+ return this;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchMergeScheduler.java b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchMergeScheduler.java
new file mode 100644
index 0000000000000..ac72c7a21da75
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/index/engine/ElasticsearchMergeScheduler.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.engine;
+
+import org.apache.lucene.index.MergeScheduler;
+import org.elasticsearch.index.merge.MergeStats;
+import org.elasticsearch.index.merge.OnGoingMerge;
+
+import java.util.Set;
+
+public interface ElasticsearchMergeScheduler {
+
+ Set<OnGoingMerge> onGoingMerges();
+
+ MergeStats stats();
+
+ void refreshConfig();
+
+ MergeScheduler getMergeScheduler();
+}
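To make the new seam concrete, a minimal hypothetical implementor (not part of this change; it leans on the MergeTracking helper introduced below): Lucene drives merging through the MergeScheduler view returned by getMergeScheduler(), while Elasticsearch reads statistics through this interface.

import org.apache.logging.log4j.LogManager;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.MergeTrigger;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.merge.OnGoingMerge;

import java.io.IOException;
import java.util.Set;

// Hypothetical implementor, for illustration only.
class TrackingOnlyMergeScheduler extends MergeScheduler implements ElasticsearchMergeScheduler {

    private final MergeTracking mergeTracking = new MergeTracking(
        LogManager.getLogger(TrackingOnlyMergeScheduler.class),
        () -> Double.POSITIVE_INFINITY // no auto-throttle in this sketch
    );

    @Override
    public void merge(MergeSource mergeSource, MergeTrigger trigger) throws IOException {
        // elided: pull OneMerge instances from mergeSource and run them, wrapping
        // each in mergeTracking.mergeStarted(...) / mergeFinished(...)
    }

    @Override
    public void close() throws IOException {}

    @Override
    public Set<OnGoingMerge> onGoingMerges() {
        return mergeTracking.onGoingMerges();
    }

    @Override
    public MergeStats stats() {
        return mergeTracking.stats();
    }

    @Override
    public void refreshConfig() {
        // nothing dynamic to refresh in this sketch
    }

    @Override
    public MergeScheduler getMergeScheduler() {
        return this; // the Lucene-facing view handed to IndexWriterConfig
    }
}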
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 1f6345416a4e6..cd571a21c90d3 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -20,6 +20,7 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LiveIndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
@@ -139,7 +140,7 @@ public class InternalEngine extends Engine {
private volatile long lastDeleteVersionPruneTimeMSec;
private final Translog translog;
- private final ElasticsearchConcurrentMergeScheduler mergeScheduler;
+ private final ElasticsearchMergeScheduler mergeScheduler;
private final IndexWriter indexWriter;
@@ -248,11 +249,12 @@ public InternalEngine(EngineConfig engineConfig) {
Translog translog = null;
ExternalReaderManager externalReaderManager = null;
ElasticsearchReaderManager internalReaderManager = null;
- EngineMergeScheduler scheduler = null;
+ MergeScheduler scheduler = null;
boolean success = false;
try {
this.lastDeleteVersionPruneTimeMSec = engineConfig.getThreadPool().relativeTimeInMillis();
- mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings());
+ mergeScheduler = createMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings());
+ scheduler = mergeScheduler.getMergeScheduler();
throttle = new IndexThrottle();
try {
store.trimUnsafeCommits(config().getTranslogConfig().getTranslogPath());
@@ -383,7 +385,7 @@ private SoftDeletesPolicy newSoftDeletesPolicy() throws IOException {
@Nullable
private CombinedDeletionPolicy.CommitsListener newCommitsListener() {
- Engine.IndexCommitListener listener = engineConfig.getIndexCommitListener();
+ IndexCommitListener listener = engineConfig.getIndexCommitListener();
if (listener != null) {
final IndexCommitListener wrappedListener = Assertions.ENABLED ? assertingCommitsOrderListener(listener) : listener;
return new CombinedDeletionPolicy.CommitsListener() {
@@ -824,7 +826,7 @@ private GetResult getFromTranslog(
config(),
translogInMemorySegmentsCount::incrementAndGet
);
- final Engine.Searcher searcher = new Engine.Searcher(
+ final Searcher searcher = new Searcher(
"realtime_get",
ElasticsearchDirectoryReader.wrap(inMemoryReader, shardId),
config().getSimilarity(),
@@ -841,7 +843,7 @@ public GetResult get(
Get get,
MappingLookup mappingLookup,
DocumentParser documentParser,
- Function<Engine.Searcher, Engine.Searcher> searcherWrapper
+ Function<Searcher, Searcher> searcherWrapper
) {
try (var ignored = acquireEnsureOpenRef()) {
if (get.realtime()) {
@@ -875,7 +877,7 @@ protected GetResult realtimeGetUnderLock(
Get get,
MappingLookup mappingLookup,
DocumentParser documentParser,
- Function<Engine.Searcher, Engine.Searcher> searcherWrapper,
+ Function<Searcher, Searcher> searcherWrapper,
boolean getFromSearcher
) {
assert isDrainedForClose() == false;
@@ -1098,7 +1100,7 @@ protected boolean assertPrimaryCanOptimizeAddDocument(final Index index) {
return true;
}
- private boolean assertIncomingSequenceNumber(final Engine.Operation.Origin origin, final long seqNo) {
+ private boolean assertIncomingSequenceNumber(final Operation.Origin origin, final long seqNo) {
if (origin == Operation.Origin.PRIMARY) {
assert assertPrimaryIncomingSequenceNumber(origin, seqNo);
} else {
@@ -1108,7 +1110,7 @@ private boolean assertIncomingSequenceNumber(final Engine.Operation.Origin origi
return true;
}
- protected boolean assertPrimaryIncomingSequenceNumber(final Engine.Operation.Origin origin, final long seqNo) {
+ protected boolean assertPrimaryIncomingSequenceNumber(final Operation.Origin origin, final long seqNo) {
// sequence number should not be set when operation origin is primary
assert seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO
: "primary operations must never have an assigned sequence number but was [" + seqNo + "]";
@@ -2700,7 +2702,7 @@ private IndexWriterConfig getIndexWriterConfig() {
iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
iwc.setIndexDeletionPolicy(combinedDeletionPolicy);
iwc.setInfoStream(TESTS_VERBOSE ? InfoStream.getDefault() : new LoggerInfoStream(logger));
- iwc.setMergeScheduler(mergeScheduler);
+ iwc.setMergeScheduler(mergeScheduler.getMergeScheduler());
// Give us the opportunity to upgrade old segments while performing
// background merges
MergePolicy mergePolicy = config().getMergePolicy();
@@ -2753,7 +2755,7 @@ private IndexWriterConfig getIndexWriterConfig() {
/** A listener that warms the segments if needed when acquiring a new reader */
static final class RefreshWarmerListener implements BiConsumer<ElasticsearchDirectoryReader, ElasticsearchDirectoryReader> {
- private final Engine.Warmer warmer;
+ private final Warmer warmer;
private final Logger logger;
private final AtomicBoolean isEngineClosed;
@@ -2817,6 +2819,10 @@ LiveIndexWriterConfig getCurrentIndexWriterConfig() {
return indexWriter.getConfig();
}
+ protected ElasticsearchMergeScheduler createMergeScheduler(ShardId shardId, IndexSettings indexSettings) {
+ return new EngineMergeScheduler(shardId, indexSettings);
+ }
+
private final class EngineMergeScheduler extends ElasticsearchConcurrentMergeScheduler {
private final AtomicInteger numMergesInFlight = new AtomicInteger(0);
private final AtomicBoolean isThrottling = new AtomicBoolean();
@@ -2827,7 +2833,7 @@ private final class EngineMergeScheduler extends ElasticsearchConcurrentMergeSch
@Override
public synchronized void beforeMerge(OnGoingMerge merge) {
- int maxNumMerges = mergeScheduler.getMaxMergeCount();
+ int maxNumMerges = getMaxMergeCount();
if (numMergesInFlight.incrementAndGet() > maxNumMerges) {
if (isThrottling.getAndSet(true) == false) {
logger.info("now throttling indexing: numMergesInFlight={}, maxNumMerges={}", numMergesInFlight, maxNumMerges);
@@ -2838,7 +2844,7 @@ public synchronized void beforeMerge(OnGoingMerge merge) {
@Override
public synchronized void afterMerge(OnGoingMerge merge) {
- int maxNumMerges = mergeScheduler.getMaxMergeCount();
+ int maxNumMerges = getMaxMergeCount();
if (numMergesInFlight.decrementAndGet() < maxNumMerges) {
if (isThrottling.getAndSet(false)) {
logger.info("stop throttling indexing: numMergesInFlight={}, maxNumMerges={}", numMergesInFlight, maxNumMerges);
@@ -2876,25 +2882,29 @@ protected void doRun() {
@Override
protected void handleMergeException(final Throwable exc) {
- engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
- @Override
- public void onFailure(Exception e) {
- logger.debug("merge failure action rejected", e);
- }
-
- @Override
- protected void doRun() throws Exception {
- /*
- * We do this on another thread rather than the merge thread that we are initially called on so that we have complete
- * confidence that the call stack does not contain catch statements that would cause the error that might be thrown
- * here from being caught and never reaching the uncaught exception handler.
- */
- failEngine("merge failed", new MergePolicy.MergeException(exc));
- }
- });
+ mergeException(exc);
}
}
+ protected void mergeException(final Throwable exc) {
+ engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
+ @Override
+ public void onFailure(Exception e) {
+ logger.debug("merge failure action rejected", e);
+ }
+
+ @Override
+ protected void doRun() throws Exception {
+ /*
+ * We do this on another thread rather than the merge thread that we are initially called on so that we have complete
+ * confidence that the call stack does not contain catch statements that would cause the error that might be thrown
+ * here from being caught and never reaching the uncaught exception handler.
+ */
+ failEngine("merge failed", new MergePolicy.MergeException(exc));
+ }
+ });
+ }
+
/**
* Commits the specified index writer.
*
diff --git a/server/src/main/java/org/elasticsearch/index/engine/MergeTracking.java b/server/src/main/java/org/elasticsearch/index/engine/MergeTracking.java
new file mode 100644
index 0000000000000..3f52b607cf356
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/index/engine/MergeTracking.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.engine;
+
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.index.MergePolicy;
+import org.elasticsearch.common.metrics.CounterMetric;
+import org.elasticsearch.common.metrics.MeanMetric;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.index.merge.MergeStats;
+import org.elasticsearch.index.merge.OnGoingMerge;
+
+import java.util.Collections;
+import java.util.Locale;
+import java.util.Set;
+import java.util.function.DoubleSupplier;
+
+public class MergeTracking {
+
+ protected final Logger logger;
+ private final DoubleSupplier mbPerSecAutoThrottle;
+
+ private final MeanMetric totalMerges = new MeanMetric();
+ private final CounterMetric totalMergesNumDocs = new CounterMetric();
+ private final CounterMetric totalMergesSizeInBytes = new CounterMetric();
+ private final CounterMetric currentMerges = new CounterMetric();
+ private final CounterMetric currentMergesNumDocs = new CounterMetric();
+ private final CounterMetric currentMergesSizeInBytes = new CounterMetric();
+ private final CounterMetric totalMergeStoppedTime = new CounterMetric();
+ private final CounterMetric totalMergeThrottledTime = new CounterMetric();
+
+ private final Set<OnGoingMerge> onGoingMerges = ConcurrentCollections.newConcurrentSet();
+ private final Set<OnGoingMerge> readOnlyOnGoingMerges = Collections.unmodifiableSet(onGoingMerges);
+
+ public MergeTracking(Logger logger, DoubleSupplier mbPerSecAutoThrottle) {
+ this.logger = logger;
+ this.mbPerSecAutoThrottle = mbPerSecAutoThrottle;
+ }
+
+ public Set<OnGoingMerge> onGoingMerges() {
+ return readOnlyOnGoingMerges;
+ }
+
+ public void mergeStarted(OnGoingMerge onGoingMerge) {
+ MergePolicy.OneMerge merge = onGoingMerge.getMerge();
+ int totalNumDocs = merge.totalNumDocs();
+ long totalSizeInBytes = merge.totalBytesSize();
+ currentMerges.inc();
+ currentMergesNumDocs.inc(totalNumDocs);
+ currentMergesSizeInBytes.inc(totalSizeInBytes);
+ onGoingMerges.add(onGoingMerge);
+
+ if (logger.isTraceEnabled()) {
+ logger.trace(
+ "merge [{}] starting: merging [{}] segments, [{}] docs, [{}] size, into [{}] estimated_size",
+ onGoingMerge.getId(),
+ merge.segments.size(),
+ totalNumDocs,
+ ByteSizeValue.ofBytes(totalSizeInBytes),
+ ByteSizeValue.ofBytes(merge.estimatedMergeBytes)
+ );
+ }
+ }
+
+ public void mergeFinished(final MergePolicy.OneMerge merge, final OnGoingMerge onGoingMerge, long tookMS) {
+ int totalNumDocs = merge.totalNumDocs();
+ long totalSizeInBytes = merge.totalBytesSize();
+
+ onGoingMerges.remove(onGoingMerge);
+
+ currentMerges.dec();
+ currentMergesNumDocs.dec(totalNumDocs);
+ currentMergesSizeInBytes.dec(totalSizeInBytes);
+
+ totalMergesNumDocs.inc(totalNumDocs);
+ totalMergesSizeInBytes.inc(totalSizeInBytes);
+ totalMerges.inc(tookMS);
+ long stoppedMS = TimeValue.nsecToMSec(
+ merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.STOPPED)
+ );
+ long throttledMS = TimeValue.nsecToMSec(
+ merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.PAUSED)
+ );
+ totalMergeStoppedTime.inc(stoppedMS);
+ totalMergeThrottledTime.inc(throttledMS);
+
+ String message = String.format(
+ Locale.ROOT,
+ "merge [%s] segment [%s] done: took [%s], [%s], [%,d] docs, [%s] stopped, [%s] throttled",
+ onGoingMerge.getId(),
+ getSegmentName(merge),
+ TimeValue.timeValueMillis(tookMS),
+ ByteSizeValue.ofBytes(totalSizeInBytes),
+ totalNumDocs,
+ TimeValue.timeValueMillis(stoppedMS),
+ TimeValue.timeValueMillis(throttledMS)
+ );
+
+ if (tookMS > 20000) { // if more than 20 seconds, DEBUG log it
+ logger.debug("{}", message);
+ } else if (logger.isTraceEnabled()) {
+ logger.trace("{}", message);
+ }
+ }
+
+ public MergeStats stats() {
+ final MergeStats mergeStats = new MergeStats();
+ mergeStats.add(
+ totalMerges.count(),
+ totalMerges.sum(),
+ totalMergesNumDocs.count(),
+ totalMergesSizeInBytes.count(),
+ currentMerges.count(),
+ currentMergesNumDocs.count(),
+ currentMergesSizeInBytes.count(),
+ totalMergeStoppedTime.count(),
+ totalMergeThrottledTime.count(),
+ mbPerSecAutoThrottle.getAsDouble()
+ );
+ return mergeStats;
+ }
+
+ private static String getSegmentName(MergePolicy.OneMerge merge) {
+ return merge.getMergeInfo() != null ? merge.getMergeInfo().info.name : "_na_";
+ }
+}
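A small hypothetical driver mirroring the doMerge() flow in ElasticsearchConcurrentMergeScheduler above, to show the intended mergeStarted/mergeFinished pairing:

import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeScheduler;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.merge.OnGoingMerge;

import java.io.IOException;

// Hypothetical driver, for illustration only.
class MergeRunner {
    private final MergeTracking tracking;

    MergeRunner(MergeTracking tracking) {
        this.tracking = tracking;
    }

    void runMerge(MergeScheduler.MergeSource source, MergePolicy.OneMerge merge) throws IOException {
        OnGoingMerge onGoingMerge = new OnGoingMerge(merge);
        long startNS = System.nanoTime();
        tracking.mergeStarted(onGoingMerge);  // registers the merge, bumps current* metrics
        try {
            source.merge(merge);              // the actual Lucene merge work
        } finally {
            long tookMS = TimeValue.nsecToMSec(System.nanoTime() - startNS);
            tracking.mergeFinished(merge, onGoingMerge, tookMS); // rolls current* into total*, logs slow merges
        }
    }
}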
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
index ad22f4917cb79..2e49e271c3e4b 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
@@ -111,6 +111,7 @@ public int get() {
private final Set<String> ignoredFields;
private final List<IgnoredSourceFieldMapper.NameValue> ignoredFieldValues;
private final List<IgnoredSourceFieldMapper.NameValue> ignoredFieldsMissingValues;
+ private final boolean inArrayScopeEnabled;
private boolean inArrayScope;
private final Map<String, List<Mapper>> dynamicMappers;
@@ -143,6 +144,7 @@ private DocumentParserContext(
Set<String> ignoreFields,
List<IgnoredSourceFieldMapper.NameValue> ignoredFieldValues,
List<IgnoredSourceFieldMapper.NameValue> ignoredFieldsWithNoSource,
+ boolean inArrayScopeEnabled,
boolean inArrayScope,
Map<String, List<Mapper>> dynamicMappers,
Map<String, ObjectMapper> dynamicObjectMappers,
@@ -164,6 +166,7 @@ private DocumentParserContext(
this.ignoredFields = ignoreFields;
this.ignoredFieldValues = ignoredFieldValues;
this.ignoredFieldsMissingValues = ignoredFieldsWithNoSource;
+ this.inArrayScopeEnabled = inArrayScopeEnabled;
this.inArrayScope = inArrayScope;
this.dynamicMappers = dynamicMappers;
this.dynamicObjectMappers = dynamicObjectMappers;
@@ -188,6 +191,7 @@ private DocumentParserContext(ObjectMapper parent, ObjectMapper.Dynamic dynamic,
in.ignoredFields,
in.ignoredFieldValues,
in.ignoredFieldsMissingValues,
+ in.inArrayScopeEnabled,
in.inArrayScope,
in.dynamicMappers,
in.dynamicObjectMappers,
@@ -219,6 +223,7 @@ protected DocumentParserContext(
new HashSet<>(),
new ArrayList<>(),
new ArrayList<>(),
+ mappingParserContext.getIndexSettings().isSyntheticSourceSecondDocParsingPassEnabled(),
false,
new HashMap<>(),
new HashMap<>(),
@@ -371,7 +376,7 @@ public final Collection<IgnoredSourceFieldMapper.NameValue> getIgnoredFieldsMiss
* Applies to synthetic source only.
*/
public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOException {
- if (canAddIgnoredField() && mapper instanceof ObjectMapper) {
+ if (canAddIgnoredField() && mapper instanceof ObjectMapper && inArrayScopeEnabled) {
boolean isNested = mapper instanceof NestedObjectMapper;
if ((inArrayScope == false && isNested == false) || (inArrayScope && isNested)) {
DocumentParserContext subcontext = switchParser(parser());
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java
index 118cdbffc5db9..0883434a0e393 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java
@@ -18,11 +18,13 @@
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.index.IndexMode;
+import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.SearchExecutionContext;
@@ -59,8 +61,20 @@ public class SourceFieldMapper extends MetadataFieldMapper {
public static final String LOSSY_PARAMETERS_ALLOWED_SETTING_NAME = "index.lossy.source-mapping-parameters";
+ public static final Setting<SourceFieldMapper.Mode> INDEX_MAPPER_SOURCE_MODE_SETTING = Setting.enumSetting(SourceFieldMapper.Mode.class, settings -> {
+ final IndexMode indexMode = IndexSettings.MODE.get(settings);
+
+ switch (indexMode) {
+ case LOGSDB:
+ case TIME_SERIES:
+ return Mode.SYNTHETIC.name();
+ default:
+ return Mode.STORED.name();
+ }
+ }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope);
+
/** The source mode */
- private enum Mode {
+ public enum Mode {
DISABLED,
STORED,
SYNTHETIC
@@ -93,6 +107,15 @@ private enum Mode {
true
);
+ private static final SourceFieldMapper TSDB_DEFAULT_STORED = new SourceFieldMapper(
+ Mode.STORED,
+ Explicit.IMPLICIT_TRUE,
+ Strings.EMPTY_ARRAY,
+ Strings.EMPTY_ARRAY,
+ IndexMode.TIME_SERIES,
+ true
+ );
+
private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper(
Mode.SYNTHETIC,
Explicit.IMPLICIT_TRUE,
@@ -102,6 +125,15 @@ private enum Mode {
false
);
+ private static final SourceFieldMapper TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper(
+ Mode.STORED,
+ Explicit.IMPLICIT_TRUE,
+ Strings.EMPTY_ARRAY,
+ Strings.EMPTY_ARRAY,
+ IndexMode.TIME_SERIES,
+ false
+ );
+
private static final SourceFieldMapper LOGSDB_DEFAULT = new SourceFieldMapper(
Mode.SYNTHETIC,
Explicit.IMPLICIT_TRUE,
@@ -111,6 +143,15 @@ private enum Mode {
true
);
+ private static final SourceFieldMapper LOGSDB_DEFAULT_STORED = new SourceFieldMapper(
+ Mode.STORED,
+ Explicit.IMPLICIT_TRUE,
+ Strings.EMPTY_ARRAY,
+ Strings.EMPTY_ARRAY,
+ IndexMode.LOGSDB,
+ true
+ );
+
private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE = new SourceFieldMapper(
Mode.SYNTHETIC,
Explicit.IMPLICIT_TRUE,
@@ -120,6 +161,15 @@ private enum Mode {
false
);
+ private static final SourceFieldMapper LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED = new SourceFieldMapper(
+ Mode.STORED,
+ Explicit.IMPLICIT_TRUE,
+ Strings.EMPTY_ARRAY,
+ Strings.EMPTY_ARRAY,
+ IndexMode.LOGSDB,
+ false
+ );
+
/*
* Synthetic source was added as the default for TSDB in v.8.7. The legacy field mapper below
* is used in bwc tests and mixed clusters containing time series indexes created in an earlier version.
@@ -194,6 +244,8 @@ public static class Builder extends MetadataFieldMapper.Builder {
m -> Arrays.asList(toType(m).excludes)
);
+ private final Settings settings;
+
private final IndexMode indexMode;
private final boolean supportsNonDefaultParameterValues;
@@ -207,6 +259,7 @@ public Builder(
boolean enableRecoverySource
) {
super(Defaults.NAME);
+ this.settings = settings;
this.indexMode = indexMode;
this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false
|| settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true);
@@ -223,10 +276,10 @@ protected Parameter<?>[] getParameters() {
return new Parameter<?>[] { enabled, mode, includes, excludes };
}
- private boolean isDefault() {
- Mode m = mode.get();
- if (m != null
- && (((indexMode != null && indexMode.isSyntheticSourceEnabled() && m == Mode.SYNTHETIC) == false) || m == Mode.DISABLED)) {
+ private boolean isDefault(final Mode sourceMode) {
+ if (sourceMode != null
+ && (((indexMode != null && indexMode.isSyntheticSourceEnabled() && sourceMode == Mode.SYNTHETIC) == false)
+ || sourceMode == Mode.DISABLED)) {
return false;
}
return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty();
@@ -242,12 +295,14 @@ public SourceFieldMapper build() {
throw new MapperParsingException("Cannot set both [mode] and [enabled] parameters");
}
}
- if (isDefault()) {
- return switch (indexMode) {
- case TIME_SERIES -> enableRecoverySource ? TSDB_DEFAULT : TSDB_DEFAULT_NO_RECOVERY_SOURCE;
- case LOGSDB -> enableRecoverySource ? LOGSDB_DEFAULT : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE;
- default -> enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE;
- };
+ // NOTE: if `index.mapping.source.mode` is set, it takes precedence in determining the source mode for `_source`;
+ // otherwise the mode is determined from `index.mode` and `_source.mode`.
+ final Mode sourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.exists(settings)
+ ? INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings)
+ : mode.get();
+ if (isDefault(sourceMode)) {
+ return resolveSourceMode(indexMode, sourceMode, enableRecoverySource);
+
}
if (supportsNonDefaultParameterValues == false) {
List<String> disallowed = new ArrayList<>();
@@ -271,8 +326,9 @@ public SourceFieldMapper build() {
);
}
}
+
SourceFieldMapper sourceFieldMapper = new SourceFieldMapper(
- mode.get(),
+ sourceMode,
enabled.get(),
includes.getValue().toArray(Strings.EMPTY_ARRAY),
excludes.getValue().toArray(Strings.EMPTY_ARRAY),
@@ -287,21 +343,42 @@ public SourceFieldMapper build() {
}
+ private static SourceFieldMapper resolveSourceMode(final IndexMode indexMode, final Mode sourceMode, boolean enableRecoverySource) {
+ if (indexMode == IndexMode.STANDARD) {
+ return enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE;
+ }
+ final SourceFieldMapper syntheticWithoutRecoverySource = indexMode == IndexMode.TIME_SERIES
+ ? TSDB_DEFAULT_NO_RECOVERY_SOURCE
+ : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE;
+ final SourceFieldMapper syntheticWithRecoverySource = indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : LOGSDB_DEFAULT;
+ final SourceFieldMapper storedWithoutRecoverySource = indexMode == IndexMode.TIME_SERIES
+ ? TSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED
+ : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE_STORED;
+ final SourceFieldMapper storedWithRecoverySource = indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT_STORED : LOGSDB_DEFAULT_STORED;
+
+ switch (sourceMode) {
+ case SYNTHETIC:
+ return enableRecoverySource ? syntheticWithRecoverySource : syntheticWithoutRecoverySource;
+ case STORED:
+ return enableRecoverySource ? storedWithRecoverySource : storedWithoutRecoverySource;
+ case DISABLED:
+ throw new IllegalArgumentException("_source cannot be disabled in index using [" + indexMode + "] index mode");
+ default:
+ throw new IllegalStateException("Unexpected value: " + sourceMode);
+ }
+ }
+
public static final TypeParser PARSER = new ConfigurableTypeParser(c -> {
- var indexMode = c.getIndexSettings().getMode();
+ final IndexMode indexMode = c.getIndexSettings().getMode();
boolean enableRecoverySource = INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(c.getSettings());
+ final Mode settingSourceMode = INDEX_MAPPER_SOURCE_MODE_SETTING.get(c.getSettings());
+
if (indexMode.isSyntheticSourceEnabled()) {
- if (indexMode == IndexMode.TIME_SERIES) {
- if (c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0)) {
- return enableRecoverySource ? TSDB_DEFAULT : TSDB_DEFAULT_NO_RECOVERY_SOURCE;
- } else {
- return enableRecoverySource ? TSDB_LEGACY_DEFAULT : TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE;
- }
- } else if (indexMode == IndexMode.LOGSDB) {
- return enableRecoverySource ? LOGSDB_DEFAULT : LOGSDB_DEFAULT_NO_RECOVERY_SOURCE;
+ if (indexMode == IndexMode.TIME_SERIES && c.getIndexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0)) {
+ return enableRecoverySource ? TSDB_LEGACY_DEFAULT : TSDB_LEGACY_DEFAULT_NO_RECOVERY_SOURCE;
}
}
- return enableRecoverySource ? DEFAULT : DEFAULT_NO_RECOVERY_SOURCE;
+ return resolveSourceMode(indexMode, settingSourceMode, enableRecoverySource);
},
c -> new Builder(
c.getIndexSettings().getMode(),
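A short illustration of the precedence spelled out in the NOTE above (hypothetical scaffolding; only the setting defined in this file is exercised):

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.SourceFieldMapper;

public class SourceModePrecedenceExample {
    public static void main(String[] args) {
        Settings indexSettings = Settings.builder()
            .put("index.mode", "logsdb")                // derived default would be SYNTHETIC
            .put("index.mapping.source.mode", "stored") // explicit setting wins
            .build();
        SourceFieldMapper.Mode mode = SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexSettings);
        assert mode == SourceFieldMapper.Mode.STORED;   // without the override, get() falls back to SYNTHETIC
    }
}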
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
index d7353584706d8..c3959bd442a1a 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
@@ -2270,7 +2270,7 @@ public void write(XContentBuilder b) throws IOException {
if (indexCreatedVersion.onOrAfter(LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION)) {
byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
}
- int dims = fieldType().dims;
+ int dims = fieldType().elementType == ElementType.BIT ? fieldType().dims / Byte.SIZE : fieldType().dims;
for (int dim = 0; dim < dims; dim++) {
fieldType().elementType.readAndWriteValue(byteBuffer, b);
}
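The Byte.SIZE division reflects how bit vectors are stored: dims counts bits, but each stored value is one byte, so synthetic source must round-trip dims / 8 values. For instance:

// element_type "bit": a mapping with dims = 64 stores 64 / 8 = 8 bytes,
// so the loop above performs 8 readAndWriteValue() calls, not 64.
int declaredDims = 64;
int storedValues = declaredDims / Byte.SIZE; // == 8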
diff --git a/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java b/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java
index df49e00f8af73..7c40fdc93a48b 100644
--- a/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java
+++ b/server/src/main/java/org/elasticsearch/index/merge/OnGoingMerge.java
@@ -50,4 +50,8 @@ public long getTotalBytesSize() {
public List<SegmentCommitInfo> getMergedSegments() {
return oneMerge.segments;
}
+
+ public MergePolicy.OneMerge getMerge() {
+ return oneMerge;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java
index 89f651468068d..0af7f80608870 100644
--- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -38,6 +38,7 @@
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RecoverySource;
@@ -1704,7 +1705,7 @@ interface IndexDeletionAllowedPredicate {
IndexSettings indexSettings) -> canDeleteIndexContents(index);
private final IndexDeletionAllowedPredicate ALWAYS_TRUE = (Index index, IndexSettings indexSettings) -> true;
- public AliasFilter buildAliasFilter(ClusterState state, String index, Set<String> resolvedExpressions) {
+ public AliasFilter buildAliasFilter(ClusterState state, String index, Set<ResolvedExpression> resolvedExpressions) {
/* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch
* of dependencies we pass in a function that can perform the parsing. */
CheckedFunction filterParser = bytes -> {
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java
index 6be94ab21a4f7..b0d33a75ba883 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java
@@ -293,9 +293,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(Fields.TYPE, recoverySource.getType());
builder.field(Fields.STAGE, stage.toString());
builder.field(Fields.PRIMARY, primary);
- builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime);
+ builder.timestampFieldsFromUnixEpochMillis(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime);
if (timer.stopTime > 0) {
- builder.timeField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime);
+ builder.timestampFieldsFromUnixEpochMillis(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime);
}
builder.humanReadableField(Fields.TOTAL_TIME_IN_MILLIS, Fields.TOTAL_TIME, new TimeValue(timer.time()));
diff --git a/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java b/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java
index 0e5b3a555b800..9c666bd4a35f5 100644
--- a/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java
+++ b/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java
@@ -16,6 +16,7 @@
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
+import java.util.Map;
/**
* This class defines an empty secret settings object. This is useful for services that do not have any secret settings.
@@ -48,4 +49,9 @@ public TransportVersion getMinimalSupportedVersion() {
@Override
public void writeTo(StreamOutput out) throws IOException {}
+
+ @Override
+ public SecretSettings newSecretSettings(Map<String, Object> newSecrets) {
+ return INSTANCE;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java
index 0c863932c6afe..cba0282f7fed8 100644
--- a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java
+++ b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java
@@ -16,6 +16,7 @@
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
+import java.util.Map;
/**
* This class defines an empty task settings object. This is useful for services that do not have any task settings.
@@ -53,4 +54,9 @@ public TransportVersion getMinimalSupportedVersion() {
@Override
public void writeTo(StreamOutput out) throws IOException {}
+
+ @Override
+ public TaskSettings updatedTaskSettings(Map<String, Object> newSettings) {
+ return INSTANCE;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java
index cbbfef2cc65fa..190f2d689a58d 100644
--- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java
+++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java
@@ -192,12 +192,22 @@ default boolean canStream(TaskType taskType) {
return supportedStreamingTasks().contains(taskType);
}
+ record DefaultConfigId(String inferenceId, TaskType taskType, InferenceService service) {};
+
/**
- * A service can define default configurations that can be
- * used out of the box without creating an endpoint first.
- * @return Default configurations provided by this service
+ * Get the Ids and task type of any default configurations provided by this service
+ * @return Defaults
*/
- default List<Model> defaultConfigs() {
+ default List<DefaultConfigId> defaultConfigIds() {
return List.of();
}
+
+ /**
+ * Call the listener with the default model configurations defined by
+ * the service
+ * @param defaultsListener The listener
+ */
+ default void defaultConfigs(ActionListener<List<Model>> defaultsListener) {
+ defaultsListener.onResponse(List.of());
+ }
}
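A hypothetical service sketch showing how the two hooks pair up (identifiers invented; the remaining InferenceService methods are elided): defaultConfigIds() is the cheap synchronous advertisement, while defaultConfigs() assembles the full Model objects via the listener.

import java.util.List;
import org.elasticsearch.action.ActionListener;

// Hypothetical service, for illustration only.
public class ExampleInferenceService implements InferenceService {

    private static final String DEFAULT_ENDPOINT_ID = ".example-default-endpoint";

    @Override
    public List<DefaultConfigId> defaultConfigIds() {
        // cheap and synchronous: advertise only the id and task type
        return List.of(new DefaultConfigId(DEFAULT_ENDPOINT_ID, TaskType.SPARSE_EMBEDDING, this));
    }

    @Override
    public void defaultConfigs(ActionListener<List<Model>> defaultsListener) {
        // potentially expensive: build the full model configuration for each default
        defaultsListener.onResponse(List.of(buildDefaultModel(DEFAULT_ENDPOINT_ID)));
    }

    private Model buildDefaultModel(String inferenceId) {
        // hypothetical helper; a real service constructs its concrete Model subclass here
        throw new UnsupportedOperationException("illustration only");
    }

    // ... remaining InferenceService methods elided ...
}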
diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java
index 68dc865b4c7db..3274bf571d10a 100644
--- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java
+++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java
@@ -10,6 +10,8 @@
package org.elasticsearch.inference;
import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.List;
@@ -21,7 +23,7 @@ public interface InferenceServiceExtension {
List<Factory> getInferenceServiceFactories();
- record InferenceServiceFactoryContext(Client client, ThreadPool threadPool) {}
+ record InferenceServiceFactoryContext(Client client, ThreadPool threadPool, ClusterService clusterService, Settings settings) {}
interface Factory {
/**
diff --git a/server/src/main/java/org/elasticsearch/inference/SecretSettings.java b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java
index e2c0c8b58c69b..90ca92bb0e2ef 100644
--- a/server/src/main/java/org/elasticsearch/inference/SecretSettings.java
+++ b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java
@@ -12,6 +12,9 @@
import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
import org.elasticsearch.xcontent.ToXContentObject;
+import java.util.Map;
+
public interface SecretSettings extends ToXContentObject, VersionedNamedWriteable {
+ SecretSettings newSecretSettings(Map<String, Object> newSecrets);
}
diff --git a/server/src/main/java/org/elasticsearch/inference/TaskSettings.java b/server/src/main/java/org/elasticsearch/inference/TaskSettings.java
index 9862abce2332c..7dd20688245ba 100644
--- a/server/src/main/java/org/elasticsearch/inference/TaskSettings.java
+++ b/server/src/main/java/org/elasticsearch/inference/TaskSettings.java
@@ -12,6 +12,11 @@
import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
import org.elasticsearch.xcontent.ToXContentObject;
+import java.util.Map;
+
public interface TaskSettings extends ToXContentObject, VersionedNamedWriteable {
+
boolean isEmpty();
+
+ TaskSettings updatedTaskSettings(Map<String, Object> newSettings);
}
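For contrast with the no-op EmptyTaskSettings above, a sketch of the merge semantics updatedTaskSettings is meant to provide (class and field names hypothetical; serialization plumbing elided):

import java.util.Map;

// Hypothetical single-field task settings.
record TemperatureTaskSettings(Double temperature) implements TaskSettings {

    @Override
    public boolean isEmpty() {
        return temperature == null;
    }

    @Override
    public TaskSettings updatedTaskSettings(Map<String, Object> newSettings) {
        Object updated = newSettings.get("temperature");
        // keys present in the update win; absent keys keep their current value
        return updated == null ? this : new TemperatureTaskSettings(((Number) updated).doubleValue());
    }

    // ... VersionedNamedWriteable and ToXContentObject methods elided ...
}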
diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java
index 1c68615203d3a..a3639214a1b9d 100644
--- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java
+++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java
@@ -420,7 +420,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(Fields.VM_VERSION, vmVersion);
builder.field(Fields.VM_VENDOR, vmVendor);
builder.field(Fields.USING_BUNDLED_JDK, usingBundledJdk);
- builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime);
+ builder.timestampFieldsFromUnixEpochMillis(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime);
builder.startObject(Fields.MEM);
builder.humanReadableField(Fields.HEAP_INIT_IN_BYTES, Fields.HEAP_INIT, ByteSizeValue.ofBytes(mem.heapInit));
diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java
index 5024cc5468866..32a65302922a8 100644
--- a/server/src/main/java/org/elasticsearch/node/Node.java
+++ b/server/src/main/java/org/elasticsearch/node/Node.java
@@ -561,7 +561,7 @@ public synchronized void close() throws IOException {
toClose.add(() -> stopWatch.stop().start("transport"));
toClose.add(injector.getInstance(TransportService.class));
toClose.add(injector.getInstance(NodeMetrics.class));
- toClose.add(injector.getInstance(IndicesService.class));
+ toClose.add(injector.getInstance(IndicesMetrics.class));
if (ReadinessService.enabled(environment)) {
toClose.add(injector.getInstance(ReadinessService.class));
}
diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
index 45fd6afe4fca6..7828bb956a160 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
@@ -20,6 +20,11 @@ private SearchCapabilities() {}
/** Support regex and range match rules in interval queries. */
private static final String RANGE_REGEX_INTERVAL_QUERY_CAPABILITY = "range_regexp_interval_queries";
+ /** Support synthetic source with `bit` type in `dense_vector` field when `index` is set to `false`. */
+ private static final String BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY = "bit_dense_vector_synthetic_source";
- public static final Set<String> CAPABILITIES = Set.of(RANGE_REGEX_INTERVAL_QUERY_CAPABILITY);
+ public static final Set<String> CAPABILITIES = Set.of(
+ RANGE_REGEX_INTERVAL_QUERY_CAPABILITY,
+ BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY
+ );
}
diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java
index e380034b627d1..f6043e1c30315 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchService.java
@@ -27,6 +27,7 @@
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.CheckedSupplier;
@@ -1676,7 +1677,7 @@ public boolean isForceExecution() {
}
}
- public AliasFilter buildAliasFilter(ClusterState state, String index, Set<String> resolvedExpressions) {
+ public AliasFilter buildAliasFilter(ClusterState state, String index, Set<ResolvedExpression> resolvedExpressions) {
return indicesService.buildAliasFilter(state, index, resolvedExpressions);
}
diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java
index 6707d77d6a2d0..d49ac1e29bea6 100644
--- a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java
+++ b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java
@@ -115,7 +115,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
if (description != null) {
builder.field("description", description);
}
- builder.timeField("start_time_in_millis", "start_time", startTime);
+ builder.timestampFieldsFromUnixEpochMillis("start_time_in_millis", "start_time", startTime);
if (builder.humanReadable()) {
builder.field("running_time", new TimeValue(runningTimeNanos, TimeUnit.NANOSECONDS).toString());
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java
index 834bacd9e6a04..1faeabb6acbf7 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java
@@ -22,6 +22,7 @@
import org.elasticsearch.cluster.metadata.DataStreamTestHelper;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
@@ -229,9 +230,19 @@ public void testResolveHiddenProperlyWithDateMath() {
.metadata(buildMetadata(new Object[][] {}, indices))
.build();
String[] requestedIndex = new String[] { "<logs-pgsql-prod-{now/d}>" };
- Set<String> resolvedIndices = resolver.resolveExpressions(clusterState, IndicesOptions.LENIENT_EXPAND_OPEN, true, requestedIndex);
+ Set<ResolvedExpression> resolvedIndices = resolver.resolveExpressions(
+ clusterState,
+ IndicesOptions.LENIENT_EXPAND_OPEN,
+ true,
+ requestedIndex
+ );
assertThat(resolvedIndices.size(), is(1));
- assertThat(resolvedIndices, contains(oneOf("logs-pgsql-prod-" + todaySuffix, "logs-pgsql-prod-" + tomorrowSuffix)));
+ assertThat(
+ resolvedIndices,
+ contains(
+ oneOf(new ResolvedExpression("logs-pgsql-prod-" + todaySuffix), new ResolvedExpression("logs-pgsql-prod-" + tomorrowSuffix))
+ )
+ );
}
public void testSystemIndexAccess() {
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java
index 8f0ff82beab4b..74408b99e92ce 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java
@@ -17,6 +17,7 @@
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettingProvider;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.SystemIndices;
@@ -69,7 +70,7 @@ public void testSettingsProviderIsOverridden() throws Exception {
public Settings getAdditionalIndexSettings(
String indexName,
String dataStreamName,
- boolean timeSeries,
+ IndexMode templateIndexMode,
Metadata metadata,
Instant resolvedAt,
Settings allSettings,
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
index 6be5b48f9d723..fe0b7926229cb 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
@@ -15,6 +15,7 @@
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
@@ -26,7 +27,6 @@
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Collections;
import java.util.List;
import java.util.Locale;
@@ -52,11 +52,11 @@ private static String formatDate(String pattern, ZonedDateTime zonedDateTime) {
public void testNormal() throws Exception {
int numIndexExpressions = randomIntBetween(1, 9);
- List<String> indexExpressions = new ArrayList<>(numIndexExpressions);
+ List<ResolvedExpression> indexExpressions = new ArrayList<>(numIndexExpressions);
for (int i = 0; i < numIndexExpressions; i++) {
- indexExpressions.add(randomAlphaOfLength(10));
+ indexExpressions.add(new ResolvedExpression(randomAlphaOfLength(10)));
}
- List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions);
+ List<ResolvedExpression> result = DateMathExpressionResolver.resolve(context, indexExpressions);
assertThat(result.size(), equalTo(indexExpressions.size()));
for (int i = 0; i < indexExpressions.size(); i++) {
assertThat(result.get(i), equalTo(indexExpressions.get(i)));
@@ -64,25 +64,25 @@ public void testNormal() throws Exception {
}
public void testExpression() throws Exception {
- List<String> indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", "<logstash-{now}>");
- List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions);
+ List<ResolvedExpression> indexExpressions = resolvedExpressions("<.marvel-{now}>", "<.watch_history-{now}>", "<logstash-{now}>");
+ List<ResolvedExpression> result = DateMathExpressionResolver.resolve(context, indexExpressions);
assertThat(result.size(), equalTo(3));
- assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
- assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
- assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+ assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+ assertThat(result.get(1).resource(), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+ assertThat(result.get(2).resource(), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
}
public void testExpressionWithWildcardAndExclusions() {
- List indexExpressions = Arrays.asList(
+ List indexExpressions = resolvedExpressions(
"<-before-inner-{now}>",
"-",
"",
"<-after-inner-{now}>",
"-"
);
- List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions);
+ List<ResolvedExpression> result = DateMathExpressionResolver.resolve(context, indexExpressions);
assertThat(
- result,
+ result.stream().map(ResolvedExpression::resource).toList(),
Matchers.contains(
equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))),
equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion
@@ -98,7 +98,7 @@ public void testExpressionWithWildcardAndExclusions() {
);
result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions);
assertThat(
- result,
+ result.stream().map(ResolvedExpression::resource).toList(),
Matchers.contains(
equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))),
// doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion
@@ -112,21 +112,24 @@ public void testExpressionWithWildcardAndExclusions() {
}
public void testEmpty() throws Exception {
- List<String> result = DateMathExpressionResolver.resolve(context, Collections.emptyList());
+ List<ResolvedExpression> result = DateMathExpressionResolver.resolve(context, List.of());
assertThat(result.size(), equalTo(0));
}
public void testExpression_Static() throws Exception {
- List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>"));
+ List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-test>"));
assertThat(result.size(), equalTo(1));
- assertThat(result.get(0), equalTo(".marvel-test"));
+ assertThat(result.get(0).resource(), equalTo(".marvel-test"));
}
public void testExpression_MultiParts() throws Exception {
- List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>"));
+ List result = DateMathExpressionResolver.resolve(
+ context,
+ resolvedExpressions("<.text1-{now/d}-text2-{now/M}>")
+ );
assertThat(result.size(), equalTo(1));
assertThat(
- result.get(0),
+ result.get(0).resource(),
equalTo(
".text1-"
+ formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))
@@ -137,33 +140,42 @@ public void testExpression_MultiParts() throws Exception {
}
public void testExpression_CustomFormat() throws Exception {
- List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>"));
+ List results = DateMathExpressionResolver.resolve(
+ context,
+ resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd}}>")
+ );
assertThat(results.size(), equalTo(1));
- assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+ assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
}
public void testExpression_EscapeStatic() throws Exception {
- List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>"));
+ List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar\\{v\\}el-{now/d}>"));
assertThat(result.size(), equalTo(1));
- assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+ assertThat(result.get(0).resource(), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
}
public void testExpression_EscapeDateFormat() throws Exception {
- List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>"));
+ List result = DateMathExpressionResolver.resolve(
+ context,
+ resolvedExpressions("<.marvel-{now/d{'\\{year\\}'yyyy}}>")
+ );
assertThat(result.size(), equalTo(1));
- assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime()))));
+ assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime()))));
}
public void testExpression_MixedArray() throws Exception {
- List<String> result = DateMathExpressionResolver.resolve(
+ List<ResolvedExpression> result = DateMathExpressionResolver.resolve(
context,
- Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>")
+ resolvedExpressions("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>")
);
assertThat(result.size(), equalTo(4));
- assertThat(result.get(0), equalTo("name1"));
- assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
- assertThat(result.get(2), equalTo("name2"));
- assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1))));
+ assertThat(result.get(0).resource(), equalTo("name1"));
+ assertThat(result.get(1).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+ assertThat(result.get(2).resource(), equalTo("name2"));
+ assertThat(
+ result.get(3).resource(),
+ equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))
+ );
}
public void testExpression_CustomTimeZoneInIndexName() throws Exception {
@@ -202,19 +214,19 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception {
name -> false,
name -> false
);
- List<String> results = DateMathExpressionResolver.resolve(
+ List<ResolvedExpression> results = DateMathExpressionResolver.resolve(
context,
- Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>")
+ resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>")
);
assertThat(results.size(), equalTo(1));
logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0));
- assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone))));
+ assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone))));
}
public void testExpressionInvalidUnescaped() throws Exception {
Exception e = expectThrows(
ElasticsearchParseException.class,
- () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>"))
+ () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar}vel-{now/d}>"))
);
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("invalid character at position ["));
@@ -223,7 +235,7 @@ public void testExpressionInvalidUnescaped() throws Exception {
public void testExpressionInvalidDateMathFormat() throws Exception {
Exception e = expectThrows(
ElasticsearchParseException.class,
- () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>"))
+ () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}>"))
);
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
@@ -232,7 +244,7 @@ public void testExpressionInvalidDateMathFormat() throws Exception {
public void testExpressionInvalidEmptyDateMathFormat() throws Exception {
Exception e = expectThrows(
ElasticsearchParseException.class,
- () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>"))
+ () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}}>"))
);
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("missing date format"));
@@ -241,10 +253,13 @@ public void testExpressionInvalidEmptyDateMathFormat() throws Exception {
public void testExpressionInvalidOpenEnded() throws Exception {
Exception e = expectThrows(
ElasticsearchParseException.class,
- () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>"))
+ () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d>"))
);
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
}
+ private List<ResolvedExpression> resolvedExpressions(String... expressions) {
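+ // wrap each plain expression string in a ResolvedExpression, the input type the resolver now expects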
+ return Arrays.stream(expressions).map(ResolvedExpression::new).toList();
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java
index 1ca59ff402bd8..1df3bf4132b60 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java
@@ -13,10 +13,12 @@
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList.Expression;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.test.ESTestCase;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.function.Supplier;
@@ -39,10 +41,13 @@ public void testEmpty() {
public void testExplicitSingleNameExpression() {
for (IndicesOptions indicesOptions : List.of(getExpandWildcardsIndicesOptions(), getNoExpandWildcardsIndicesOptions())) {
for (String expressionString : List.of("non_wildcard", "-non_exclusion")) {
- ExpressionList expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString));
+ ExpressionList expressionList = new ExpressionList(
+ getContextWithOptions(indicesOptions),
+ resolvedExpressions(expressionString)
+ );
assertThat(expressionList.hasWildcard(), is(false));
if (randomBoolean()) {
- expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString));
+ expressionList = new ExpressionList(getContextWithOptions(indicesOptions), resolvedExpressions((expressionString)));
}
Iterator<Expression> expressionIterator = expressionList.iterator();
assertThat(expressionIterator.hasNext(), is(true));
@@ -62,11 +67,14 @@ public void testWildcardSingleExpression() {
for (String wildcardTest : List.of("*", "a*", "*b", "a*b", "a-*b", "a*-b", "-*", "-a*", "-*b", "**", "*-*")) {
ExpressionList expressionList = new ExpressionList(
getContextWithOptions(getExpandWildcardsIndicesOptions()),
- List.of(wildcardTest)
+ resolvedExpressions(wildcardTest)
);
assertThat(expressionList.hasWildcard(), is(true));
if (randomBoolean()) {
- expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), List.of(wildcardTest));
+ expressionList = new ExpressionList(
+ getContextWithOptions(getExpandWildcardsIndicesOptions()),
+ resolvedExpressions(wildcardTest)
+ );
}
Iterator<Expression> expressionIterator = expressionList.iterator();
assertThat(expressionIterator.hasNext(), is(true));
@@ -82,13 +90,13 @@ public void testWildcardSingleExpression() {
}
public void testWildcardLongerExpression() {
- List<String> onlyExplicits = randomList(7, () -> randomAlphaOfLengthBetween(0, 5));
- String wildcard = randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**");
- List<String> expressionList = new ArrayList<>(onlyExplicits.size() + 1);
+ List<ResolvedExpression> onlyExplicits = randomList(7, () -> new ResolvedExpression(randomAlphaOfLengthBetween(0, 5)));
+ ResolvedExpression wildcard = new ResolvedExpression(randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**"));
+ List<ResolvedExpression> expressionList = new ArrayList<>(onlyExplicits.size() + 1);