Merge branch 'main' into fix-lookupjoin-wildcard-fieldcaps
craigtaverner authored Dec 11, 2024
2 parents dc611ab + a7fdc10 commit 85d3362
Showing 177 changed files with 8,514 additions and 4,336 deletions.
5 changes: 5 additions & 0 deletions docs/changelog/114618.yaml
@@ -0,0 +1,5 @@
pr: 114618
summary: Add a new index setting to skip recovery source when synthetic source is enabled
area: Logs
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/116663.yaml
@@ -0,0 +1,5 @@
pr: 116663
summary: KNN vector rescoring for quantized vectors
area: Vector Search
type: feature
issues: []
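This rescoring option is exercised by the REST test added further down in this commit; the following is a minimal search sketch based on that test (the index and field names are illustrative, not taken from the changelog):

------------------------------------------------------------
POST my-index/_search
{
  "knn": {
    "field": "vector",
    "query_vector": [2, 2, 2, 2, 3],
    "k": 3,
    "num_candidates": 3,
    "rescore_vector": {
      "num_candidates_factor": 1.5
    }
  }
}
------------------------------------------------------------

The REST test in this commit compares the rescored hits against an exact `script_score` ranking over the same vectors.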
6 changes: 6 additions & 0 deletions docs/changelog/117469.yaml
@@ -0,0 +1,6 @@
pr: 117469
summary: Handle exceptions in query phase can match
area: Search
type: bug
issues:
- 104994
5 changes: 5 additions & 0 deletions docs/changelog/118354.yaml
@@ -0,0 +1,5 @@
pr: 118354
summary: Fix log message format bugs
area: Ingest Node
type: bug
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/118370.yaml
@@ -0,0 +1,6 @@
pr: 118370
summary: Fix concurrency issue with `ReinitializingSourceProvider`
area: Mapping
type: bug
issues:
- 118238
5 changes: 5 additions & 0 deletions docs/changelog/118378.yaml
@@ -0,0 +1,5 @@
pr: 118378
summary: Opt into extra data stream resolution
area: ES|QL
type: bug
issues: []
2 changes: 1 addition & 1 deletion docs/plugins/analysis-nori.asciidoc
@@ -475,7 +475,7 @@ The input is untokenized text and the result is the single term attribute emitted
- 영영칠 -> 7
- 일영영영 -> 1000
- 삼천2백2십삼 -> 3223
- 조육백만오천일 -> 1000006005001
- 일조육백만오천일 -> 1000006005001
- 3.2천 -> 3200
- 1.2만345.67 -> 12345.67
- 4,647.100 -> 4647.1
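The corrected example above belongs to the `nori_number` token filter documentation. As an illustration (not part of this diff), the filter can be exercised directly with the `_analyze` API:

------------------------------------------------------------
GET _analyze
{
  "tokenizer": "nori_tokenizer",
  "filter": ["nori_number"],
  "text": "일조육백만오천일"
}
------------------------------------------------------------

This should emit the single numeric token `1000006005001`, matching the corrected line.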
4 changes: 4 additions & 0 deletions docs/reference/docs/bulk.asciidoc
@@ -257,6 +257,10 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh]
(Optional, Boolean) If `true`, the request's actions must target an index alias.
Defaults to `false`.

`require_data_stream`::
(Optional, Boolean) If `true`, the request's actions must target a data stream (existing or to-be-created).
Defaults to `false`.

include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing]

include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source]
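A hypothetical request using the new flag (the data stream name is a placeholder; data streams only accept `create` actions and require an `@timestamp` field):

------------------------------------------------------------
POST _bulk?require_data_stream=true
{ "create": { "_index": "logs-myapp-default" } }
{ "@timestamp": "2024-12-11T00:00:00Z", "message": "example event" }
------------------------------------------------------------

With the flag set, an action whose target resolves to a regular index or alias rather than a data stream (existing or to-be-created) is rejected.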
@@ -36,7 +36,7 @@ the `cosine` measures are equivalent.
------------------------------------------------------------
PUT _inference/sparse_embedding/elser_embeddings <1>
{
"service": "elser",
"service": "elasticsearch",
"service_settings": {
"num_allocations": 1,
"num_threads": 1
@@ -206,7 +206,7 @@ PUT _inference/text_embedding/google_vertex_ai_embeddings <1>
<2> A valid service account in JSON format for the Google Vertex AI API.
<3> For the list of the available models, refer to the https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api[Text embeddings API] page.
<4> The name of the location to use for the {infer} task. Refer to https://cloud.google.com/vertex-ai/generative-ai/docs/learn/locations[Generative AI on Vertex AI locations] for available locations.
<5> The name of the project to use for the {infer} task.
<5> The name of the project to use for the {infer} task.

// end::google-vertex-ai[]

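The callouts above annotate an example that creates a Google Vertex AI text-embedding inference endpoint. A hedged sketch of such a request follows, with placeholder values; the exact `service_settings` key names are an assumption, since they are not visible in this hunk:

------------------------------------------------------------
PUT _inference/text_embedding/google_vertex_ai_embeddings
{
  "service": "googlevertexai",
  "service_settings": {
    "service_account_json": "<service-account-json>",
    "model_id": "text-embedding-004",
    "location": "us-central1",
    "project_id": "my-gcp-project"
  }
}
------------------------------------------------------------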
@@ -11,7 +11,6 @@

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
@@ -131,9 +130,7 @@ public TimeValue getTook() {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeArray(items);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_0_0)) {
out.writeVLong(tookInMillis);
}
out.writeVLong(tookInMillis);
}

@Override
@@ -1359,7 +1359,7 @@ public void testKnnQueryNotSupportedInPercolator() throws IOException {
""");
indicesAdmin().prepareCreate("index1").setMapping(mappings).get();
ensureGreen();
QueryBuilder knnVectorQueryBuilder = new KnnVectorQueryBuilder("my_vector", new float[] { 1, 1, 1, 1, 1 }, 10, 10, null);
QueryBuilder knnVectorQueryBuilder = new KnnVectorQueryBuilder("my_vector", new float[] { 1, 1, 1, 1, 1 }, 10, 10, null, null);

IndexRequestBuilder indexRequestBuilder = prepareIndex("index1").setId("knn_query1")
.setSource(jsonBuilder().startObject().field("my_query", knnVectorQueryBuilder).endObject());
@@ -1025,7 +1025,7 @@ public void onResponse(Void unused) {
// should be no other processes interacting with the repository.
logger.warn(
Strings.format(
"failed to clean up multipart upload [{}] of blob [{}][{}][{}]",
"failed to clean up multipart upload [%s] of blob [%s][%s][%s]",
abortMultipartUploadRequest.getUploadId(),
blobStore.getRepositoryMetadata().name(),
abortMultipartUploadRequest.getBucketName(),
36 changes: 33 additions & 3 deletions muted-tests.yml
@@ -274,9 +274,6 @@ tests:
- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT
method: test {p0=data_stream/120_data_streams_stats/Multiple data stream}
issue: https://github.com/elastic/elasticsearch/issues/118217
- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
method: testEveryActionIsEitherOperatorOnlyOrNonOperator
issue: https://github.com/elastic/elasticsearch/issues/118220
- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/118224
- class: org.elasticsearch.packaging.test.ArchiveTests
@@ -291,6 +288,39 @@ tests:
- class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS1UnavailableRemotesIT
method: testEsqlRcs1UnavailableRemoteScenarios
issue: https://github.com/elastic/elasticsearch/issues/118350
- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests
method: testSearcherId
issue: https://github.com/elastic/elasticsearch/issues/118374
- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT
method: test {p0=/10_info/Info}
issue: https://github.com/elastic/elasticsearch/issues/118394
- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT
method: test {p0=/11_nodes/Additional disk information}
issue: https://github.com/elastic/elasticsearch/issues/118395
- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT
method: test {p0=/11_nodes/Test cat nodes output with full_id set}
issue: https://github.com/elastic/elasticsearch/issues/118396
- class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT
method: test {p0=/11_nodes/Test cat nodes output}
issue: https://github.com/elastic/elasticsearch/issues/118397
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=migrate/20_reindex_status/Test get reindex status with nonexistent task id}
issue: https://github.com/elastic/elasticsearch/issues/118401
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=migrate/10_reindex/Test Reindex With Nonexistent Data Stream}
issue: https://github.com/elastic/elasticsearch/issues/118274
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=migrate/10_reindex/Test Reindex With Bad Data Stream Name}
issue: https://github.com/elastic/elasticsearch/issues/118272
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=migrate/10_reindex/Test Reindex With Unsupported Mode}
issue: https://github.com/elastic/elasticsearch/issues/118273
- class: org.elasticsearch.xpack.inference.InferenceCrudIT
method: testUnifiedCompletionInference
issue: https://github.com/elastic/elasticsearch/issues/118405
- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
method: testEveryActionIsEitherOperatorOnlyOrNonOperator
issue: https://github.com/elastic/elasticsearch/issues/118220

# Examples:
#
@@ -263,7 +263,7 @@ private String getRollupIndexName() throws IOException {
if (asMap.size() == 1) {
return (String) asMap.keySet().toArray()[0];
}
logger.warn("--> No matching rollup name for path [%s]", endpoint);
logger.warn("--> No matching rollup name for path [{}]", endpoint);
return null;
}

@@ -238,7 +238,7 @@ private String getRollupIndexName() throws IOException {
if (asMap.size() == 1) {
return (String) asMap.keySet().toArray()[0];
}
logger.warn("--> No matching rollup name for path [%s]", endpoint);
logger.warn("--> No matching rollup name for path [{}]", endpoint);
return null;
}

8 changes: 2 additions & 6 deletions rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json
@@ -56,10 +56,6 @@
"type":"time",
"description":"Explicit operation timeout"
},
"type":{
"type":"string",
"description":"Default document type for items which don't provide one"
},
"_source":{
"type":"list",
"description":"True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub-request"
@@ -78,11 +74,11 @@
},
"require_alias": {
"type": "boolean",
"description": "Sets require_alias for all incoming documents. Defaults to unset (false)"
"description": "If true, the request’s actions must target an index alias. Defaults to false."
},
"require_data_stream": {
"type": "boolean",
"description": "When true, requires the destination to be a data stream (existing or to-be-created). Default is false"
"description": "If true, the request's actions must target a data stream (existing or to-be-created). Default to false"
},
"list_executed_pipelines": {
"type": "boolean",
@@ -18,7 +18,7 @@ setup:
dims: 5
index: true
index_options:
type: hnsw
type: int8_hnsw
similarity: l2_norm

- do:
@@ -73,3 +73,59 @@ setup:
- match: {hits.total.value: 1}
- match: {hits.hits.0._id: "3"}
- match: {hits.hits.0.fields.name.0: "rabbit.jpg"}

---
"Vector rescoring has no effect for non-quantized vectors and provides same results as non-rescored knn":
- requires:
reason: 'Quantized vector rescoring is required'
test_runner_features: [capabilities]
capabilities:
- method: GET
path: /_search
capabilities: [knn_quantized_vector_rescore]
- skip:
features: "headers"

# Rescore
- do:
headers:
Content-Type: application/json
search:
rest_total_hits_as_int: true
index: index1
body:
knn:
field: vector
query_vector: [2, 2, 2, 2, 3]
k: 3
num_candidates: 3
rescore_vector:
num_candidates_factor: 1.5

# Get rescoring scores - hit ordering may change depending on how things are distributed
- match: { hits.total: 3 }
- set: { hits.hits.0._score: rescore_score0 }
- set: { hits.hits.1._score: rescore_score1 }
- set: { hits.hits.2._score: rescore_score2 }

# Exact knn via script score
- do:
headers:
Content-Type: application/json
search:
rest_total_hits_as_int: true
index: index1
body:
query:
script_score:
query: {match_all: {} }
script:
source: "1.0 / (1.0 + Math.pow(l2norm(params.query_vector, 'vector'), 2.0))"
params:
query_vector: [2, 2, 2, 2, 3]

# Compare scores as hit IDs may change depending on how things are distributed
- match: { hits.total: 3 }
- match: { hits.hits.0._score: $rescore_score0 }
- match: { hits.hits.1._score: $rescore_score1 }
- match: { hits.hits.2._score: $rescore_score2 }