From 797a70483b502ebf37b70ff2fd8c2338772f4ca1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 30 Nov 2023 07:08:08 +0000 Subject: [PATCH 001/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-964dbe334a8 --- build-tools-internal/version.properties | 2 +- docs/Versions.asciidoc | 4 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 3 files changed, 75 insertions(+), 75 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 575d8310e9e24..6fcd9604a6d0a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-a6d788e1138 +lucene = 9.10.0-snapshot-964dbe334a8 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 3f44db9928434..97117e9cbc077 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.9.0 -:lucene_version_path: 9_9_0 +:lucene_version: 9.10.0 +:lucene_version_path: 9_10_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d90d60bf701e1..9efbad76f5849 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 83eae8906a7986552654a100313c81874ee40615 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 30 Nov 2023 08:35:38 +0000 Subject: [PATCH 002/107] Add IndexVersion constant for Lucene 9.10 --- 
server/src/main/java/org/elasticsearch/index/IndexVersions.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7c99764e44283..52913ddb55600 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -92,6 +92,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion ES_VERSION_8_12 = def(8_500_004, Version.LUCENE_9_8_0); public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = def(8_500_010, Version.LUCENE_9_9_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_500_099, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! No, really, From cb1827c780730072980aa9bc13a475dd1de5d8bf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 1 Dec 2023 07:11:07 +0000 Subject: [PATCH 003/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-6d6e88f107d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6fcd9604a6d0a..f8e8fdfeac93b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.10.0-snapshot-964dbe334a8 +lucene = 9.10.0-snapshot-6d6e88f107d bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9efbad76f5849..991638c800793 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + 
+ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3fff61b784a3b87e7fb1071b5ac6f6a09d76e6d2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 2 Dec 2023 07:10:23 +0000 Subject: [PATCH 004/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-cc9b7394690 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 146 ++++++++++++------------ 2 files changed, 74 insertions(+), 74 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f8e8fdfeac93b..dd4c45270e740 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.10.0-snapshot-6d6e88f107d +lucene = 9.10.0-snapshot-cc9b7394690 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 991638c800793..622677a05deeb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1,5 +1,5 @@ - + false false @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From b373a9d6719de5c5db3fe7a67f440b206c1fcff0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 3 Dec 2023 07:09:24 +0000 Subject: [PATCH 005/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-b45defb3cf9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties 
b/build-tools-internal/version.properties index dd4c45270e740..e50f73178ec18 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.10.0-snapshot-cc9b7394690 +lucene = 9.10.0-snapshot-b45defb3cf9 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 622677a05deeb..2b4225c2aafce 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 5987518b5a8edee9b524925fb165b2bbe99d60c1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 4 Dec 2023 07:09:17 +0000 Subject: [PATCH 006/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-b45defb3cf9 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2b4225c2aafce..8733526109e61 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 5088dfebef4ed10d12c744d2d00dd066baba2f01 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 5 Dec 2023 07:10:21 +0000 Subject: [PATCH 007/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-b45defb3cf9 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml 
index df98a444ba99c..3c81af572a43c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 8fb1d8527f1e8781703a4ea369476dddfcf4173d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 6 Dec 2023 07:08:58 +0000 Subject: [PATCH 008/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5852a0fb8ca --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 5d89e417c79e9..cf6e583c43f09 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.10.0-snapshot-b45defb3cf9 +lucene = 9.10.0-snapshot-5852a0fb8ca bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b0d33cf9b5a68..d576a6063b009 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0b9c16dd848c385d15e35fb5b276d6d6ab35bf75 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 7 Dec 2023 07:10:41 +0000 Subject: [PATCH 009/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5852a0fb8ca --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index d576a6063b009..eb394bd119554 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From cb34e41268be5f2271c53c59118886f1e35a311d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 8 Dec 2023 07:09:20 +0000 Subject: [PATCH 010/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-65c4251718b --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index cf6e583c43f09..9119597019991 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.10.0-snapshot-5852a0fb8ca +lucene = 9.10.0-snapshot-65c4251718b bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index eb394bd119554..30fd984122428 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 1d91cd9e5b6df849754aa5be30c4bf33d98c1c07 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 9 Dec 2023 07:09:48 +0000 Subject: [PATCH 011/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-06002e015dc --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 
deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1ec42875ef793..da08a9b12b53e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-65c4251718b +lucene = 9.10.0-snapshot-06002e015dc bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 30fd984122428..c8c177821ad5d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From daecdda22d663b2de0703a59b6b660fbac6919bd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 11 Dec 2023 07:09:20 +0000 Subject: [PATCH 012/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-59c0f8257b9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index da08a9b12b53e..828703d0a8274 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-06002e015dc +lucene = 9.10.0-snapshot-59c0f8257b9 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c8c177821ad5d..3d815252b479f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 
+2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From da5207ef3e597389bf0039f7f5c77ea767a0d2ce Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 12 Dec 2023 07:09:53 +0000 Subject: [PATCH 013/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-7c8d7aef42a --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 828703d0a8274..c58eb124ca5e8 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-59c0f8257b9 +lucene = 9.10.0-snapshot-7c8d7aef42a bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3d815252b479f..7d8f9fdb5a74b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e52eea72fff5299eed1874e9272a514da91d8b62 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 12 Dec 2023 13:37:07 +0100 Subject: [PATCH 014/107] Remove usages of deprecated createSharedManager methods (#103261) This commit adapts Elasticsearch to not rely on createSharedManager exposed to create collector managers corresponding to 
TopScoreDocCollector as well as TopFieldCollector. There are some more deprecations to address around the creation of collectors, but this is a good first step. --- .../index/engine/LuceneChangesSnapshot.java | 7 +- .../elasticsearch/search/dfs/DfsPhase.java | 4 +- .../query/QueryPhaseCollectorManager.java | 13 +-- ...PassGroupingCollectorSearchAfterTests.java | 7 +- .../SinglePassGroupingCollectorTests.java | 7 +- .../query/ProfileCollectorManagerTests.java | 5 +- .../query/QueryPhaseCollectorTests.java | 97 +++++++------------ 7 files changed, 51 insertions(+), 89 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index 1005f8f486beb..e63d5ef87973b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -22,7 +22,7 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; @@ -296,14 +296,13 @@ private TopDocs searchOperations(FieldDoc after, boolean accurateTotalHits) thro final Query rangeQuery = rangeQuery(Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo, indexVersionCreated); assert accurateTotalHits == false || after == null : "accurate total hits is required by the first batch only"; final SortField sortBySeqNo = new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG); - final TopFieldCollector collector = TopFieldCollector.create( + TopFieldCollectorManager topFieldCollectorManager = new TopFieldCollectorManager( new Sort(sortBySeqNo), searchBatchSize, after, accurateTotalHits ? 
Integer.MAX_VALUE : 0 ); - indexSearcher.search(rangeQuery, collector); - return collector.topDocs(); + return indexSearcher.search(rangeQuery, topFieldCollectorManager); } private Translog.Operation readDocAsOp(int docIndex) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index 5d3288408c99b..6ec22f9c11135 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -17,7 +17,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -199,7 +199,7 @@ private static void executeKnnVectorQuery(SearchContext context) throws IOExcept static DfsKnnResults singleKnnSearch(Query knnQuery, int k, Profilers profilers, ContextIndexSearcher searcher, String nestedPath) throws IOException { - CollectorManager topDocsCollectorManager = TopScoreDocCollector.createSharedManager( + CollectorManager topDocsCollectorManager = new TopScoreDocCollectorManager( k, null, Integer.MAX_VALUE diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java index 86a01756d247e..7fd09d3ddfdf1 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java @@ -35,9 +35,9 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import 
org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; import org.elasticsearch.action.search.MaxScoreCollector; @@ -413,14 +413,9 @@ private static class WithHits extends QueryPhaseCollectorManager { } } if (sortAndFormats == null) { - this.topDocsManager = TopScoreDocCollector.createSharedManager(numHits, searchAfter, hitCountThreshold); + this.topDocsManager = new TopScoreDocCollectorManager(numHits, searchAfter, hitCountThreshold); } else { - this.topDocsManager = TopFieldCollector.createSharedManager( - sortAndFormats.sort, - numHits, - (FieldDoc) searchAfter, - hitCountThreshold - ); + this.topDocsManager = new TopFieldCollectorManager(sortAndFormats.sort, numHits, (FieldDoc) searchAfter, hitCountThreshold); } } diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java index 8ad4593602a25..bec0f83f78674 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java @@ -18,7 +18,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.Directory; @@ -107,12 +107,11 @@ private > void assertSearchCollapse( ? 
SinglePassGroupingCollector.createNumeric("field", fieldType, sort, expectedNumGroups, after) : SinglePassGroupingCollector.createKeyword("field", fieldType, sort, expectedNumGroups, after); - TopFieldCollector topFieldCollector = TopFieldCollector.create(sort, totalHits, after, Integer.MAX_VALUE); + TopFieldCollectorManager topFieldCollectorManager = new TopFieldCollectorManager(sort, totalHits, after, Integer.MAX_VALUE); Query query = new MatchAllDocsQuery(); searcher.search(query, collapsingCollector); - searcher.search(query, topFieldCollector); + TopFieldDocs topDocs = searcher.search(query, topFieldCollectorManager); TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); - TopFieldDocs topDocs = topFieldCollector.topDocs(); assertEquals(sortField.getField(), collapseTopFieldDocs.field); assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index 8dd7ed9c21896..bb4b3f42fde85 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; -import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -132,12 +132,11 @@ private > void assertSearchCollapse( ); } - TopFieldCollector topFieldCollector = TopFieldCollector.create(sort, totalHits, Integer.MAX_VALUE); + 
TopFieldCollectorManager topFieldCollectorManager = new TopFieldCollectorManager(sort, totalHits, Integer.MAX_VALUE); Query query = new MatchAllDocsQuery(); searcher.search(query, collapsingCollector); - searcher.search(query, topFieldCollector); + TopFieldDocs topDocs = searcher.search(query, topFieldCollectorManager); TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); - TopFieldDocs topDocs = topFieldCollector.topDocs(); assertEquals(collapseField.getField(), collapseTopFieldDocs.field); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index 5cfe368a9a392..fc8b9706d387a 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.search.DummyTotalHitCountCollector; @@ -121,12 +122,12 @@ public Integer reduce(Collection collectors) { */ public void testManagerWithSearcher() throws IOException { { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(10, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); } { - CollectorManager topDocsManager = 
TopScoreDocCollector.createSharedManager(10, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); String profileReason = "profiler_reason"; ProfileCollectorManager profileCollectorManager = new ProfileCollectorManager<>(topDocsManager, profileReason); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), profileCollectorManager); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index b466101be07d8..f222e697488d2 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.Weight; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.store.Directory; @@ -108,7 +109,7 @@ public void testNegativeTerminateAfter() { public void testTopDocsOnly() throws IOException { { - CollectorManager topScoreDocManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topScoreDocManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topScoreDocManager, null, @@ -121,7 +122,7 @@ public void testTopDocsOnly() throws IOException { assertEquals(numDocs, result.topDocs.totalHits.value); } { - CollectorManager topScoreDocManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topScoreDocManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topScoreDocManager, null, @@ -137,7 +138,7 @@ public void testTopDocsOnly() throws IOException { public void 
testWithAggs() throws IOException { { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, @@ -152,7 +153,7 @@ public void testWithAggs() throws IOException { assertEquals(numDocs, result.aggs.intValue()); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, @@ -170,7 +171,7 @@ public void testWithAggs() throws IOException { public void testPostFilterTopDocsOnly() throws IOException { { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); CollectorManager> manager = createCollectorManager( @@ -185,7 +186,7 @@ public void testPostFilterTopDocsOnly() throws IOException { assertEquals(numField2Docs, result.topDocs.totalHits.value); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); TermQuery termQuery = new TermQuery(new Term("field1", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); CollectorManager> manager = createCollectorManager( @@ -203,7 +204,7 @@ public void testPostFilterTopDocsOnly() throws IOException { public void testPostFilterWithAggs() throws IOException { { - CollectorManager 
topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); TermQuery termQuery = new TermQuery(new Term("field1", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); @@ -220,7 +221,7 @@ public void testPostFilterWithAggs() throws IOException { assertEquals(numDocs, result.aggs.intValue()); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); @@ -247,18 +248,14 @@ public void testMinScoreTopDocsOnly() throws IOException { .add(new BoostQuery(new TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField2Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, null, @@ -271,7 +268,7 @@ public void testMinScoreTopDocsOnly() throws IOException { assertEquals(numField2Docs, result.topDocs.totalHits.value); } { - CollectorManager topDocsManager = 
TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, null, @@ -284,7 +281,7 @@ public void testMinScoreTopDocsOnly() throws IOException { assertEquals(numDocs, result.topDocs.totalHits.value); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, null, @@ -306,18 +303,14 @@ public void testMinScoreWithAggs() throws IOException { .add(new BoostQuery(new TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField2Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, @@ -333,7 +326,7 @@ public void testMinScoreWithAggs() throws IOException { assertEquals(numField2Docs, result.aggs.intValue()); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, 
@@ -348,7 +341,7 @@ public void testMinScoreWithAggs() throws IOException { assertEquals(numDocs, result.aggs.intValue()); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, @@ -374,18 +367,14 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField3Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, filterWeight, @@ -398,7 +387,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, filterWeight, @@ -411,7 +400,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { assertEquals(numField2Docs, result.topDocs.totalHits.value); } { - CollectorManager topDocsManager = 
TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, filterWeight, @@ -435,18 +424,14 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField3Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, @@ -461,7 +446,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { assertEquals(numField3Docs, result.aggs.intValue()); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, @@ -476,7 +461,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { assertEquals(numDocs, result.aggs.intValue()); } { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); 
CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, @@ -635,18 +620,14 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { .add(new BoostQuery(new TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField2Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = randomIntBetween(1, numField2Docs - 1); - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, null, @@ -667,18 +648,14 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { .add(new BoostQuery(new TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField2Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = randomIntBetween(1, numField2Docs - 1); - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( 
topDocsManager, @@ -703,18 +680,14 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField3Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = randomIntBetween(1, numField2AndField3Docs - 1); - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager> manager = createCollectorManager( topDocsManager, filterWeight, @@ -737,18 +710,14 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( - numField3Docs + 1, - null, - 1000 - ); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = randomIntBetween(1, numField2AndField3Docs - 1); - CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); CollectorManager> manager = createCollectorManager( topDocsManager, From 
cf1b5bcc7ab781240b3c95d313385deff3d71e4e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 13 Dec 2023 07:09:03 +0000 Subject: [PATCH 015/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-4c3b404ba6e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index c58eb124ca5e8..0784c4dad6d7d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-7c8d7aef42a +lucene = 9.10.0-snapshot-4c3b404ba6e bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7d8f9fdb5a74b..f685d3512df29 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 5a3b221f1c88b4396afef4dfa2847f91a72704a4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 14 Dec 2023 07:09:45 +0000 Subject: [PATCH 016/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-ceec058767b --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 0784c4dad6d7d..fe19f45d2b03e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 
-lucene = 9.10.0-snapshot-4c3b404ba6e +lucene = 9.10.0-snapshot-ceec058767b bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f685d3512df29..2897ac03538f7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0ecdb1f4240b2b7a1e55ac28ab5772a19682de3b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 15 Dec 2023 07:10:33 +0000 Subject: [PATCH 017/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-26ba2ff087d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index fe19f45d2b03e..862139f46907c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-ceec058767b +lucene = 9.10.0-snapshot-26ba2ff087d bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2897ac03538f7..7a782f02456ca 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + 
+ + - - - + + + From d0bc479a2ca9924da8afef121a1b671a27fccb35 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 16 Dec 2023 07:09:57 +0000 Subject: [PATCH 018/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-7d62b23ee90 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 862139f46907c..473144d19380d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-26ba2ff087d +lucene = 9.10.0-snapshot-7d62b23ee90 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7a782f02456ca..cc00d7e4556fc 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3745c8ea26421cc7f854c991bf48ecf17b8116a8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 17 Dec 2023 07:09:30 +0000 Subject: [PATCH 019/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-45ead8fec98 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 473144d19380d..7b88ef2724c6a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ 
elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-7d62b23ee90 +lucene = 9.10.0-snapshot-45ead8fec98 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cc00d7e4556fc..ffb3f947eae3e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 46fb75796a98ed0a426bea18be148f771d39b294 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 18 Dec 2023 07:10:13 +0000 Subject: [PATCH 020/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-7f75e20788c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7b88ef2724c6a..f8decd4481bdc 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-45ead8fec98 +lucene = 9.10.0-snapshot-7f75e20788c bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index ffb3f947eae3e..b5bcde4a6990b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + 
+ - - - + + + - - - + + + - - - + + + From 04cdf4246171fa4b04faeaa140a8682958556765 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 19 Dec 2023 07:09:39 +0000 Subject: [PATCH 021/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-41794f0957e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f8decd4481bdc..f7573ea211664 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-7f75e20788c +lucene = 9.10.0-snapshot-41794f0957e bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b5bcde4a6990b..010360624d44f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 78034db6228aa7508df904a5dd31213b61d7eae5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 20 Dec 2023 07:11:01 +0000 Subject: [PATCH 022/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-a63d4599f48 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f7573ea211664..33d532f71d983 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties 
@@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-41794f0957e +lucene = 9.10.0-snapshot-a63d4599f48 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 010360624d44f..6efcf6c601c1b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From bb938ca221d0feb24e3f93aa3f6b5a5c982dd6a0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 21 Dec 2023 07:10:08 +0000 Subject: [PATCH 023/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-db5eeb9fcc3 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 33d532f71d983..17a4f8e7c3e57 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-a63d4599f48 +lucene = 9.10.0-snapshot-db5eeb9fcc3 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 6efcf6c601c1b..508066edc7f42 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + From 5ee60ae2a609aecefcccb60f103e754edc9c3bdd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 22 Dec 2023 07:09:33 +0000 Subject: [PATCH 024/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-8b392a70e4c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 17a4f8e7c3e57..5acb74d6d24b8 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-db5eeb9fcc3 +lucene = 9.10.0-snapshot-8b392a70e4c bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 508066edc7f42..55f8bf523c93d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6a4c1676ac26d879a3160a6f2b99177e6f007d64 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 22 Dec 2023 12:22:16 +0000 Subject: [PATCH 025/107] Update index version for new format --- server/src/main/java/org/elasticsearch/index/IndexVersions.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 8e71da9a8a746..19c7fc89829d4 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ 
b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -97,7 +97,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion ES_VERSION_8_13 = def(8_500_009, Version.LUCENE_9_9_1); public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_00_0, Version.LUCENE_9_9_1); - public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_500_099, Version.LUCENE_9_10_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_502_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! No, really, From c428d7d126a79de3b70eb2f4d4447f9e44c970be Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 23 Dec 2023 07:09:51 +0000 Subject: [PATCH 026/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-8b392a70e4c --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 55f8bf523c93d..2e3605ff70420 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From a5816f4a8862732bc9c805aa6ab09b569b05501e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 24 Dec 2023 07:09:58 +0000 Subject: [PATCH 027/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-86573e56d3d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ca4440bdd531d..814733eb593fe 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-8b392a70e4c +lucene = 9.10.0-snapshot-86573e56d3d 
bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2e3605ff70420..7196a46bcb3c6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7d63cfa7b4cf7b7aed77f434785d9eafbab189a7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 25 Dec 2023 07:10:40 +0000 Subject: [PATCH 028/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-86573e56d3d --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7196a46bcb3c6..f7b1d39d2954c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 3f16b9028105cf4fdfbbc777c40fae260970083f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 26 Dec 2023 07:08:58 +0000 Subject: [PATCH 029/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-86573e56d3d --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f7b1d39d2954c..5bc6d4460a629 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From b295cc8b3ce96fb28dada61669649297fbaee378 
Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 27 Dec 2023 07:09:40 +0000 Subject: [PATCH 030/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-86573e56d3d --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5bc6d4460a629..28380adb42c53 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 1667aa7647c3ea751e558964283faf17108dfd2a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 28 Dec 2023 07:10:25 +0000 Subject: [PATCH 031/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-86573e56d3d --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 28380adb42c53..fef0bd4bbc201 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 48caf751663cecd245d98a14fce750ed67c2c084 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 29 Dec 2023 07:09:40 +0000 Subject: [PATCH 032/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-86573e56d3d --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fef0bd4bbc201..9526b004b40d6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 
855eb73317aa1d69b344c94d7e7b35505b6b6014 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 30 Dec 2023 07:10:06 +0000 Subject: [PATCH 033/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 814733eb593fe..f36c3267bb91b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-86573e56d3d +lucene = 9.10.0-snapshot-5c375cad754 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9526b004b40d6..dc97d37fa4771 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0f3664870c3c727b6ef95b3a39e6e7f3b4bbcfab Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 31 Dec 2023 07:09:23 +0000 Subject: [PATCH 034/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index dc97d37fa4771..4efcb683f5108 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + From 337bdc0bf6ef8630b523ef4242709c402c97b5fd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 1 Jan 2024 07:10:07 +0000 Subject: [PATCH 035/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4efcb683f5108..0e7a77772d760 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 6c92cbedbf0e42652d97b134fd2dcdd9c6e851dc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 2 Jan 2024 07:09:41 +0000 Subject: [PATCH 036/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0e7a77772d760..0a1fdf551b967 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From e2cc9ec37aa8d5dfeb316e3d101a743d717032d1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 3 Jan 2024 07:10:20 +0000 Subject: [PATCH 037/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0a1fdf551b967..11c341f3d2ad7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - 
+ - + - + - + - + - + - + - + From 7272eb04bdcfb1347d5635f4ecd0f8d35022c454 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 4 Jan 2024 07:09:58 +0000 Subject: [PATCH 038/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 11c341f3d2ad7..8ebff23328618 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 1a26b55063e6456aade8c056358200270fb0d230 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 5 Jan 2024 07:09:55 +0000 Subject: [PATCH 039/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8ebff23328618..2dacbcebbdddd 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 32a7675cdde2c009b7fb801b59a95c34b7aa758b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 6 Jan 2024 07:09:28 +0000 Subject: [PATCH 040/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-5c375cad754 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2dacbcebbdddd..0c0626c49d859 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + From 0b89315d1bb870f8483072d9d9a767bfa6df35e7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 7 Jan 2024 07:09:43 +0000 Subject: [PATCH 041/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-af7b6ef53d3 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e890c41500267..156039b1da520 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-5c375cad754 +lucene = 9.10.0-snapshot-af7b6ef53d3 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d07dbe71e0087..fcdf564399bd3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 20932c6bdb7e3924dee81bef49dc8bdce02c41c5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 8 Jan 2024 07:10:00 +0000 Subject: [PATCH 042/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-af7b6ef53d3 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fcdf564399bd3..24dba259515ee 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2666,122 +2666,122 @@ - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 9c964dd6411338db253fb8eff52e72160cfc6dab Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 9 Jan 2024 07:10:01 +0000 Subject: [PATCH 043/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-8e8fdea7d23 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 156039b1da520..6fa1da661a630 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-af7b6ef53d3 +lucene = 9.10.0-snapshot-8e8fdea7d23 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 24dba259515ee..31c778df0421d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From ea0bfb2f2cbe4a56089f99864f855ed783ac686b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 10 Jan 2024 07:10:12 +0000 Subject: [PATCH 044/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-ad525056591 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6fa1da661a630..bdecd6c85bf7d 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-8e8fdea7d23 +lucene = 9.10.0-snapshot-ad525056591 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 34413a1ef0066..809e8f670edf6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 8916e8272d05b4424a1c101eb76c75b3133bb4d2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 12 Jan 2024 07:10:07 +0000 Subject: [PATCH 045/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-7ad2507c2e5 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index bdecd6c85bf7d..cda0a322f62b4 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-ad525056591 +lucene = 9.10.0-snapshot-7ad2507c2e5 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 809e8f670edf6..e2252375a0bd4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + 
- - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a4c0fc0c4dcd956461bb036fbc6c7450d49966c0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 13 Jan 2024 07:09:46 +0000 Subject: [PATCH 046/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-fb3bcc0f61e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index cda0a322f62b4..f26f5dc67a30b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-7ad2507c2e5 +lucene = 9.10.0-snapshot-fb3bcc0f61e bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e2252375a0bd4..8c0941cfaea99 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 271e0e2c00688e683b660d3bcd0e004146044f1d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 14 Jan 2024 07:10:14 +0000 Subject: [PATCH 047/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-fb3bcc0f61e --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8c0941cfaea99..02dc9f21936c9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2666,122 
+2666,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 86adada6f4525af742bc62bfa31608585bc802e3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 15 Jan 2024 07:11:37 +0000 Subject: [PATCH 048/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-a457b5fd5fc --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f26f5dc67a30b..6703d8228eb81 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-fb3bcc0f61e +lucene = 9.10.0-snapshot-a457b5fd5fc bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 02dc9f21936c9..2e4649f20a6ad 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 13edac340674ed6c8241a9ab1bf6f2a7b09eb537 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 16 Jan 2024 07:10:45 +0000 Subject: [PATCH 049/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-f3d625ea06c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6703d8228eb81..96d967cda44af 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-a457b5fd5fc +lucene = 9.10.0-snapshot-f3d625ea06c bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2e4649f20a6ad..4066db57d82d8 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From daaf34badb17eefe084aba3b08a9bffcedb2db95 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 17 Jan 2024 07:09:53 +0000 Subject: [PATCH 050/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-00e2fe6cacb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 96d967cda44af..1702cca1f1058 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-f3d625ea06c +lucene = 9.10.0-snapshot-00e2fe6cacb bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4066db57d82d8..e37d364d258c3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From f4b644059f36296f91f3d676e34459a12f0b0969 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 18 Jan 2024 07:09:41 +0000 Subject: [PATCH 051/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-564219a65a9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1702cca1f1058..23574d1f94886 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-00e2fe6cacb +lucene = 9.10.0-snapshot-564219a65a9 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e37d364d258c3..d8a755b3e17da 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,124 +2664,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3b6e8501cf4831b28f10c00d787b2969116f38e0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 20 Jan 2024 07:11:03 +0000 Subject: [PATCH 052/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-c8980471e12 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 
d859efd68d688..5ea7cdf7b4293 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-564219a65a9 +lucene = 9.10.0-snapshot-c8980471e12 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d2d285adf84ab..3b726d67f5629 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 207a8fd40ad409599bcc5b191e5486f2613e1703 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 21 Jan 2024 07:10:24 +0000 Subject: [PATCH 053/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-c8980471e12 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3b726d67f5629..18295ac643581 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2635,122 +2635,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From b3e04a83845dd25b27313859c3ed3faa1343d7e0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 22 Jan 2024 07:10:16 +0000 Subject: [PATCH 054/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-c8980471e12 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 18295ac643581..257e929698112 100644 --- 
a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2635,122 +2635,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 46eb1ee017b590ce32ac937b42b1d7bdb5e1b8d3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 23 Jan 2024 07:10:45 +0000 Subject: [PATCH 055/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-b951c4c0611 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 5ea7cdf7b4293..bb782201a29a1 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-c8980471e12 +lucene = 9.10.0-snapshot-b951c4c0611 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 257e929698112..0af19eef3ebeb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From ae600d9bda4ec0e2656d25ebc03acce2ca02db60 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 24 Jan 2024 07:11:07 +0000 Subject: [PATCH 056/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-9ccfc30ddcb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties 
b/build-tools-internal/version.properties index bb782201a29a1..6b2b03422c7c7 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-b951c4c0611 +lucene = 9.10.0-snapshot-9ccfc30ddcb bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0af19eef3ebeb..2d1a6c69df8be 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a662e092e798972e1196a6cc1c3d57f02082eb7f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 25 Jan 2024 07:10:11 +0000 Subject: [PATCH 057/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-aabee01500d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6b2b03422c7c7..f06c994168225 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-9ccfc30ddcb +lucene = 9.10.0-snapshot-aabee01500d bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2d1a6c69df8be..45211414d4a2c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + 
+ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 647005f60ef648b056559e30318cb4acac8b2bd3 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 26 Jan 2024 11:49:00 -0500 Subject: [PATCH 058/107] Fix compilation for new lucene snapshot and FieldInfo ctor (#104818) Fixes compilation due to `parentField` info in `FieldInfo` ctor --- .../elasticsearch/index/engine/TranslogDirectoryReader.java | 3 +++ .../org/elasticsearch/index/mapper/DocumentLeafReader.java | 1 + .../elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java | 3 ++- .../org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java | 3 ++- .../lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java | 1 + 5 files changed, 9 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index a09810750c66e..ab84166701c59 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -161,6 +161,7 @@ private static class TranslogLeafReader extends LeafReader { 0, VectorEncoding.FLOAT32, VectorSimilarityFunction.EUCLIDEAN, + false, false ); private static final FieldInfo FAKE_ROUTING_FIELD = new FieldInfo( @@ -179,6 +180,7 @@ private static class TranslogLeafReader extends LeafReader { 0, VectorEncoding.FLOAT32, VectorSimilarityFunction.EUCLIDEAN, + false, false ); private static final FieldInfo FAKE_ID_FIELD = new FieldInfo( @@ -197,6 +199,7 @@ private static class TranslogLeafReader extends LeafReader { 0, VectorEncoding.FLOAT32, VectorSimilarityFunction.EUCLIDEAN, + false, false ); private static final Set TRANSLOG_FIELD_NAMES = Set.of(SourceFieldMapper.NAME, 
RoutingFieldMapper.NAME, IdFieldMapper.NAME); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index 49934776bc4a3..db90c8f052a5e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -291,6 +291,7 @@ private static FieldInfo fieldInfo(String name) { 0, VectorEncoding.FLOAT32, VectorSimilarityFunction.EUCLIDEAN, + false, false ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index c332694d93975..093ec031d0b30 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -262,7 +262,8 @@ private SegmentCommitInfo syncSegment( 0, VectorEncoding.FLOAT32, VectorSimilarityFunction.EUCLIDEAN, - fieldInfo.isSoftDeletesField() + fieldInfo.isSoftDeletesField(), + fieldInfo.isParentField() ) ); } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index df6fded49e6bb..25b4b685ac50f 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -109,7 +109,8 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { 0, fieldInfo.getVectorEncoding(), fieldInfo.getVectorSimilarityFunction(), - fieldInfo.isSoftDeletesField() + fieldInfo.isSoftDeletesField(), + fieldInfo.isParentField() ) ); } diff 
--git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java index 9cef274aa753e..83fcb17449100 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java @@ -111,6 +111,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm 0, VectorEncoding.FLOAT32, VectorSimilarityFunction.EUCLIDEAN, + false, false ); infos[i].checkConsistency(); From b7476f5f2431a6ed4015dcb439c4ea6b5971a0a0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 27 Jan 2024 07:10:16 +0000 Subject: [PATCH 059/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-42d5806fd69 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f06c994168225..a573d8a0ac323 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-aabee01500d +lucene = 9.10.0-snapshot-42d5806fd69 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 45211414d4a2c..94cc20797fb89 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 42cad64de845fcac0aa30d2dd6f0db6aa8f16efd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 28 Jan 2024 07:09:24 +0000 Subject: [PATCH 060/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-42d5806fd69 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 94cc20797fb89..8d730fa2d7990 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2635,122 +2635,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From b9418bd07dd92b359a9cc4a7b570e4e4189c16e8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 29 Jan 2024 07:09:35 +0000 Subject: [PATCH 061/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-42d5806fd69 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8d730fa2d7990..68ba7a1f56bfc 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2635,122 +2635,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From f4f841400aa5564f2e833d7d5ac9480ac355642e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 30 Jan 2024 07:10:08 +0000 Subject: [PATCH 062/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-deac9c26512 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 
a573d8a0ac323..6744aba490c16 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-42d5806fd69 +lucene = 9.10.0-snapshot-deac9c26512 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 68ba7a1f56bfc..7322d7854eb47 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e1b74aa88ae25b8a66b2f34e4a1bb90a37d2012b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 31 Jan 2024 07:09:51 +0000 Subject: [PATCH 063/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-1e36b461474 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6744aba490c16..9b8d8497b0219 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-deac9c26512 +lucene = 9.10.0-snapshot-1e36b461474 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7322d7854eb47..bbbf62bb7b252 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + 
+ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 47ca7aeb70935b2104053d605bb5aa5d1a55b0ba Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 31 Jan 2024 11:46:03 -0500 Subject: [PATCH 064/107] Update/main (#104974) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Change release version lookup to an instance method (#104902) * Upgrade to Lucene 9.9.2 (#104753) This commit upgrades to Lucene 9.9.2. * Improve `CANNOT_REBALANCE_CAN_ALLOCATE` explanation (#104904) Clarify that in this situation there is a rebalancing move that would improve the cluster balance, but there's some reason why rebalancing is not happening. Also points at the `can_rebalance_cluster_decisions` as well as the node-by-node decisions since the action needed could be described in either place. * Get from translog fails with large dense_vector (#104700) This change fixes the engine to apply the current codec when retrieving documents from the translog. We need to use the same codec than the main index in order to ensure that all the source data is indexable. The internal codec treats some fields differently than the default one, for instance dense_vectors are limited to 1024 dimensions. This PR ensures that these customizations are applied when indexing document for translog retrieval. Closes #104639 Co-authored-by: Elastic Machine * [Connector Secrets] Add delete API endpoint (#104815) * Add DELETE endpoint for /_connector/_secret/{id} * Add endpoint to write_connector_secrets cluster privilege * Merge Aggregations into InternalAggregations (#104896) This commit merges Aggregations into InternalAggregations in order to remove the unnecessary hierarchy. 
* [Profiling] Simplify cost calculation (#104816) * [Profiling] Add the number of cores to HostMetadata * Update AWS pricelist (remove cost_factor, add usd_per_hour) * Switch cost calculations from 'cost_factor' to 'usd_per_hour' * Remove superfluous CostEntry.toXContent() * Check for Number type in CostEntry.fromSource() * Add comment * Retry get_from_translog during relocations (#104579) During a promotable relocation, a `get_from_translog` sent by the unpromotable shard to handle a real-time get might encounter `ShardNotFoundException` or `IndexNotFoundException`. In these cases, we should retry. This is just for `GET`. I'll open a second PR for `mGET`. The relevant IT is in the Stateless PR. Relates ES-5727 * indicating fix for 8.12.1 for int8_hnsw (#104912) * Removing the assumption from some tests that the request builder's request() method always returns the same object (#104881) * [DOCS] Adds get setting and update settings asciidoc files to security API index (#104916) * [DOCS] Adds get setting and update settings asciidoc files to security API index. * [DOCS] Fixes references in docs. * Reuse APMMeterService of APMTelemetryProvider (#104906) * Mute more tests that tend to leak searchhits (#104922) * ESQL: Fix SearchStats#count(String) to count values not rows (#104891) SearchStats#count incorrectly counts the number of documents (or rows) in which a document appears instead of the actual number of values. This PR fixes this by looking at the term frequency instead of the doc count. Fix #104795 * Adding request source for cohere (#104926) * Fixing a broken javadoc comment in ReindexDocumentationIT (#104930) This fixes a javadoc comment that was broken by #104881 * Fix enabling / disabling of APM agent "recording" in APMAgentSettings (#104324) * Add `type` parameter support, for sorting, to the Query API Key API (#104625) This adds support for the `type` parameter, for sorting, to the Query API key API. 
The type for an API Key can currently be either `rest` or `cross_cluster`. This was overlooked in #103695 when support for the `type` parameter was first introduced only for querying. * Apply publish plugin to es-opensaml-security-api project (#104933) * Support `match` for the Query API Key API (#104594) This adds support for the `match` query type to the Query API key Information API. Note that since string values associated to API Keys are mapped as `keywords`, a `match` query with no analyzer parameter is effectively equivalent to a `term` query for such fields (e.g. `name`, `username`, `realm_name`). Relates: #101691 * [Connectors API] Relax strict response parsing for get/list operations (#104909) * Limit concurrent shards per node for ESQL (#104832) Today, we allow ESQL to execute against an unlimited number of shards concurrently on each node. This can lead to cases where we open and hold too many shards, equivalent to opening too many file descriptors or using too much memory for FieldInfos in ValuesSourceReaderOperator. This change limits the number of concurrent shards to 10 per node. This number was chosen based on the _search API, which limits it to 5. Besides the primary reason stated above, this change has other implications: We might execute fewer shards for queries with LIMIT only, leading to scenarios where we execute only some high-priority shards then stop. For now, we don't have a partial reduce at the node level, but if we introduce one in the future, it might not be as efficient as executing all shards at the same time. There are pauses between batches because batches are executed sequentially one by one. However, I believe the performance of queries executing against many shards (after can_match) is less important than resiliency. 
Closes #103666 * [DOCS] Support for nested functions in ES|QL STATS...BY (#104788) * Document nested expressions for stats * More docs * Apply suggestions from review - count-distinct.asciidoc - Content restructured, moving the section about approximate counts to end of doc. - count.asciidoc - Clarified that omitting the `expression` parameter in `COUNT` is equivalent to `COUNT(*)`, which counts the number of rows. - percentile.asciidoc - Moved the note about `PERCENTILE` being approximate and non-deterministic to end of doc. - stats.asciidoc - Clarified the `STATS` command - Added a note indicating that individual `null` values are skipped during aggregation * Comment out mentioning a buggy behavior * Update sum with inline function example, update test file * Fix typo * Delete line * Simplify wording * Fix conflict fix typo --------- Co-authored-by: Liam Thompson Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> * [ML] Passing input type through to cohere request (#104781) * Pushing input type through to cohere request * switching logic to allow request to always override * Fixing failure * Removing getModelId calls * Addressing feedback * Switching to enumset * [Transform] Unmute 2 remaining continuous tests: HistogramGroupByIT and TermsGroupByIT (#104898) * Adding ActionRequestLazyBuilder implementation of RequestBuilder (#104927) This introduces a second implementation of RequestBuilder (#104778). As opposed to ActionRequestBuilder, ActionRequestLazyBuilder does not create its request until the request() method is called, and does not hold onto that request (so each call to request() gets a new request instance). This PR also updates BulkRequestBuilder to inherit from ActionRequestLazyBuilder as an example of its use. 
* Update versions to skip after backport to 8.12 (#104953) * Update/Cleanup references to old tracing.apm.* legacy settings in favor of the telemetry.* settings (#104917) * Exclude tests that do not work in a mixed cluster scenario (#104935) * ES|QL: Improve type validation in aggs for UNSIGNED_LONG and better support for VERSION (#104911) * [Connector API] Make update configuration action non-additive (#104615) * Save allocating enum values array in two hot spots (#104952) Our readEnum code instantiates/clones enum value arrays on read. Normally, this doesn't matter much but the two spots adjusted here are visibly hot during bulk indexing, causing GBs of allocations during e.g. the http_logs indexing run. * ESQL: Correct out-of-range filter pushdowns (#99961) Fix pushed down filters for binary comparisons that compare a byte/short/int/long with an out of range value, like WHERE some_int_field < 1E300. * [DOCS] Dense vector element type should be float for OpenAI (#104966) * Fix test assertions (#104963) * Move functions that generate lucene geometries under a utility class (#104928) We have functions that generate lucene geometries scattered in different places of the code. This commit moves everything under a utility class. 
* fixing index versions --------- Co-authored-by: Simon Cooper Co-authored-by: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Co-authored-by: David Turner Co-authored-by: Jim Ferenczi Co-authored-by: Elastic Machine Co-authored-by: Navarone Feekery <13634519+navarone-feekery@users.noreply.github.com> Co-authored-by: Ignacio Vera Co-authored-by: Tim Rühsen Co-authored-by: Pooya Salehi Co-authored-by: Keith Massey Co-authored-by: István Zoltán Szabó Co-authored-by: Moritz Mack Co-authored-by: Costin Leau Co-authored-by: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Co-authored-by: Albert Zaharovits Co-authored-by: Mark Vieira Co-authored-by: Jedr Blaszyk Co-authored-by: Nhat Nguyen Co-authored-by: Abdon Pijpelink Co-authored-by: Liam Thompson Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Co-authored-by: Przemysław Witek Co-authored-by: Joe Gallo Co-authored-by: Lorenzo Dematté Co-authored-by: Luigi Dell'Aquila Co-authored-by: Armin Braun Co-authored-by: Alexander Spies Co-authored-by: David Kyle --- TRACING.md | 12 +- .../gradle/testclusters/RunTask.java | 15 +- .../server/cli/APMJvmOptionsTests.java | 2 - docs/changelog/104594.yaml | 5 + docs/changelog/104625.yaml | 6 + docs/changelog/104753.yaml | 5 + docs/changelog/104832.yaml | 6 + docs/changelog/104891.yaml | 6 + docs/changelog/104904.yaml | 5 + docs/changelog/104909.yaml | 5 + docs/changelog/104911.yaml | 7 + docs/changelog/104927.yaml | 5 + docs/changelog/99961.yaml | 6 + docs/reference/esql/functions/avg.asciidoc | 19 +- .../esql/functions/count-distinct.asciidoc | 64 ++- docs/reference/esql/functions/count.asciidoc | 20 +- docs/reference/esql/functions/max.asciidoc | 21 +- .../median-absolute-deviation.asciidoc | 20 +- docs/reference/esql/functions/median.asciidoc | 19 +- docs/reference/esql/functions/min.asciidoc | 21 +- .../esql/functions/percentile.asciidoc | 43 +- docs/reference/esql/functions/sum.asciidoc | 21 +- 
.../esql/processing-commands/stats.asciidoc | 48 +- docs/reference/release-notes/8.12.0.asciidoc | 2 + docs/reference/rest-api/security.asciidoc | 2 + .../rest-api/security/get-settings.asciidoc | 11 +- .../rest-api/security/query-api-key.asciidoc | 14 +- .../security/update-settings.asciidoc | 26 +- .../semantic-search-inference.asciidoc | 64 +-- .../elasticsearch/xcontent/XContentType.java | 8 +- .../bucket/TimeSeriesAggregationsIT.java | 14 +- .../DerivativePipelineAggregator.java | 1 - modules/apm/METERING.md | 2 +- .../org/elasticsearch/telemetry/apm/APM.java | 8 +- .../apm/internal/APMAgentSettings.java | 31 +- .../apm/internal/APMMeterService.java | 2 +- .../apm/internal/APMTelemetryProvider.java | 6 +- .../apm/internal/APMAgentSettingsTests.java | 200 ++++++-- .../test/ingest/220_drop_processor.yml | 8 +- .../rest-api-spec/test/ingest/60_fail.yml | 4 +- .../index/rankeval/RatedRequestsTests.java | 4 + .../documentation/ReindexDocumentationIT.java | 3 +- .../elasticsearch/reindex/CancelTests.java | 52 +- .../reindex/ReindexSingleNodeTests.java | 2 +- qa/apm/docker-compose.yml | 10 +- qa/mixed-cluster/build.gradle | 14 +- .../api/connector_secret.delete.json | 28 ++ .../action/search/TransportSearchIT.java | 6 +- .../action/termvectors/GetTermVectorsIT.java | 2 +- .../AggregationsIntegrationIT.java | 2 +- .../search/aggregations/CombiIT.java | 2 +- .../search/aggregations/MetadataIT.java | 4 +- .../SignificantTermsSignificanceScoreIT.java | 10 +- .../metrics/ScriptedMetricIT.java | 4 +- ...ketMetricsPipeLineAggregationTestCase.java | 4 +- .../pipeline/ExtendedStatsBucketIT.java | 6 +- .../pipeline/PercentilesBucketIT.java | 6 +- .../aggregations/pipeline/StatsBucketIT.java | 6 +- .../search/geo/GeoDistanceIT.java | 4 +- .../org/elasticsearch/TransportVersion.java | 8 + .../org/elasticsearch/TransportVersions.java | 7 +- .../action/ActionRequestLazyBuilder.java | 61 +++ .../action/bulk/BulkRequestBuilder.java | 149 +++++- .../action/get/TransportGetAction.java 
| 129 +++-- .../get/TransportGetFromTranslogAction.java | 1 - .../action/index/IndexRequest.java | 3 +- .../action/search/SearchResponse.java | 17 +- .../action/search/SearchResponseMerger.java | 2 +- .../action/search/SearchResponseSections.java | 8 +- .../action/support/WriteRequest.java | 4 +- .../routing/allocation/Explanations.java | 8 +- .../common/geo/LuceneGeometriesUtils.java | 449 +++++++++++++++++ .../org/elasticsearch/index/IndexVersion.java | 8 + .../elasticsearch/index/IndexVersions.java | 11 +- .../index/engine/InternalEngine.java | 2 +- .../index/engine/TranslogDirectoryReader.java | 16 +- .../index/mapper/GeoShapeIndexer.java | 17 +- .../index/mapper/GeoShapeQueryable.java | 185 +------ .../inference/InferenceService.java | 8 +- .../elasticsearch/inference/InputType.java | 5 +- .../elasticsearch/node/NodeConstruction.java | 2 +- .../elasticsearch/search/SearchService.java | 2 +- .../search/aggregations/Aggregations.java | 145 ------ .../aggregations/InternalAggregations.java | 138 ++++- .../InternalMultiBucketAggregation.java | 2 +- .../sampler/random/InternalRandomSampler.java | 4 +- .../BucketMetricsPipelineAggregator.java | 4 +- .../BucketScriptPipelineAggregator.java | 1 - .../CumulativeSumPipelineAggregator.java | 1 - .../SerialDiffPipelineAggregator.java | 1 - .../pipeline/SiblingPipelineAggregator.java | 3 +- .../action/bulk/BulkRequestBuilderTests.java | 37 ++ .../index/IndexRequestBuilderTests.java | 5 + .../geo/LuceneGeometriesUtilsTests.java | 476 ++++++++++++++++++ .../index/shard/ShardGetServiceTests.java | 66 ++- .../HierarchyCircuitBreakerServiceTests.java | 7 +- .../search/SearchServiceTests.java | 2 +- .../InternalAggregationsTests.java | 6 +- .../terms/RareTermsAggregatorTests.java | 10 +- .../pipeline/AvgBucketAggregatorTests.java | 11 +- .../search/query/QuerySearchResultTests.java | 6 +- .../SharedSignificantTermsTestMethods.java | 4 +- .../test/apmintegration/MetricsApmIT.java | 4 +- .../test/apmintegration/TracesApmIT.java | 
6 +- .../search/MockSearchService.java | 15 + .../es-opensaml-security-api/build.gradle | 1 + ...mulativeCardinalityPipelineAggregator.java | 1 - .../MovingPercentilesPipelineAggregator.java | 12 +- .../NormalizePipelineAggregator.java | 1 - .../inference/action/InferenceAction.java | 26 +- .../evaluation/EvaluationMetric.java | 4 +- .../evaluation/classification/Accuracy.java | 4 +- .../evaluation/classification/AucRoc.java | 4 +- .../MulticlassConfusionMatrix.java | 4 +- .../evaluation/classification/Precision.java | 4 +- .../evaluation/classification/Recall.java | 4 +- .../AbstractConfusionMatrixMetric.java | 6 +- .../evaluation/outlierdetection/AucRoc.java | 4 +- .../outlierdetection/ConfusionMatrix.java | 4 +- .../outlierdetection/Precision.java | 4 +- .../evaluation/outlierdetection/Recall.java | 4 +- .../evaluation/regression/Huber.java | 4 +- .../regression/MeanSquaredError.java | 4 +- .../MeanSquaredLogarithmicError.java | 4 +- .../evaluation/regression/RSquared.java | 4 +- .../privilege/ClusterPrivilegeResolver.java | 2 +- .../evaluation/MockAggregations.java | 17 +- .../classification/AccuracyTests.java | 3 +- .../classification/ClassificationTests.java | 6 +- .../MulticlassConfusionMatrixTests.java | 9 +- .../classification/PrecisionTests.java | 14 +- .../classification/RecallTests.java | 14 +- .../ConfusionMatrixTests.java | 4 +- .../outlierdetection/PrecisionTests.java | 8 +- .../outlierdetection/RecallTests.java | 8 +- .../evaluation/regression/HuberTests.java | 8 +- .../regression/MeanSquaredErrorTests.java | 8 +- .../MeanSquaredLogarithmicErrorTests.java | 8 +- .../evaluation/regression/RSquaredTests.java | 16 +- .../DownsampleActionSingleNodeTests.java | 24 +- .../335_connector_update_configuration.yml | 30 +- .../entsearch/510_connector_secret_get.yml | 2 +- .../entsearch/520_connector_secret_delete.yml | 71 +++ .../xpack/application/EnterpriseSearch.java | 8 +- .../application/connector/Connector.java | 52 +- 
.../connector/ConnectorIndexService.java | 70 ++- .../connector/ConnectorSearchResult.java | 51 ++ .../connector/ConnectorsAPISearchResult.java | 89 ++++ .../connector/action/GetConnectorAction.java | 12 +- .../connector/action/ListConnectorAction.java | 10 +- .../secrets/ConnectorSecretsIndexService.java | 17 + .../action/DeleteConnectorSecretAction.java | 19 + .../action/DeleteConnectorSecretRequest.java | 67 +++ .../action/DeleteConnectorSecretResponse.java | 60 +++ .../RestDeleteConnectorSecretAction.java | 42 ++ .../TransportDeleteConnectorSecretAction.java | 41 ++ .../connector/syncjob/ConnectorSyncJob.java | 128 ++--- .../syncjob/ConnectorSyncJobIndexService.java | 30 +- .../syncjob/ConnectorSyncJobSearchResult.java | 52 ++ .../action/GetConnectorSyncJobAction.java | 8 +- .../action/ListConnectorSyncJobsAction.java | 7 +- .../connector/ConnectorIndexServiceTests.java | 79 ++- .../connector/ConnectorTestUtils.java | 28 ++ ...ctorActionResponseBWCSerializingTests.java | 16 +- ...ctorActionResponseBWCSerializingTests.java | 5 +- .../ConnectorSecretsIndexServiceTests.java | 41 ++ .../secrets/ConnectorSecretsTestUtils.java | 11 + .../DeleteConnectorSecretActionTests.java | 34 ++ ...ectorSecretRequestBWCSerializingTests.java | 37 ++ ...ctorSecretResponseBWCSerializingTests.java | 46 ++ ...sportDeleteConnectorSecretActionTests.java | 72 +++ .../ConnectorSyncJobIndexServiceTests.java | 50 +- .../syncjob/ConnectorSyncJobTestUtils.java | 31 +- .../syncjob/ConnectorSyncJobTests.java | 9 +- ...JobsActionResponseBWCSerializingTests.java | 2 +- .../eql/execution/search/RuntimeUtils.java | 4 +- .../execution/sample/CircuitBreakerTests.java | 4 +- .../exchange/ExchangeSinkHandler.java | 5 +- x-pack/plugin/esql/qa/server/build.gradle | 2 + .../xpack/esql/qa/rest/RestEsqlTestCase.java | 192 ++++++- .../src/main/resources/mapping-all-types.json | 61 +++ .../src/main/resources/show.csv-spec | 32 +- .../src/main/resources/stats.csv-spec | 129 +++++ 
.../resources/stats_count_distinct.csv-spec | 12 + .../main/resources/stats_percentile.csv-spec | 39 ++ .../action/AbstractEsqlIntegTestCase.java | 3 + .../xpack/esql/action/ManyShardsIT.java | 75 ++- .../xpack/esql/action/WarningsIT.java | 13 +- .../expression/function/aggregate/Avg.java | 12 +- .../function/aggregate/CountDistinct.java | 30 +- .../expression/function/aggregate/Max.java | 8 +- .../expression/function/aggregate/Median.java | 14 +- .../aggregate/MedianAbsoluteDeviation.java | 7 +- .../expression/function/aggregate/Min.java | 8 +- .../function/aggregate/NumericAggregate.java | 16 +- .../function/aggregate/Percentile.java | 14 +- .../expression/function/aggregate/Sum.java | 2 +- .../xpack/esql/planner/AggregateMapper.java | 21 +- .../esql/planner/EsqlTranslatorHandler.java | 121 +++++ .../xpack/esql/plugin/ComputeService.java | 125 +++-- .../xpack/esql/plugin/QueryPragmas.java | 10 + .../xpack/esql/stats/SearchStats.java | 4 +- .../xpack/esql/analysis/AnalyzerTests.java | 42 ++ .../xpack/esql/analysis/VerifierTests.java | 4 +- .../LocalPhysicalPlanOptimizerTests.java | 153 +++++- .../mock/TestInferenceServiceExtension.java | 5 +- .../action/TransportInferenceAction.java | 1 + .../action/cohere/CohereActionCreator.java | 5 +- .../action/cohere/CohereActionVisitor.java | 3 +- .../action/openai/OpenAiActionCreator.java | 2 +- .../cohere/CohereEmbeddingsRequest.java | 1 + .../cohere/CohereEmbeddingsRequestEntity.java | 30 +- .../external/request/cohere/CohereUtils.java | 9 + .../inference/services/SenderService.java | 12 +- .../inference/services/ServiceUtils.java | 38 +- .../services/cohere/CohereModel.java | 3 +- .../services/cohere/CohereService.java | 6 +- .../embeddings/CohereEmbeddingsModel.java | 19 +- .../CohereEmbeddingsServiceSettings.java | 3 +- .../CohereEmbeddingsTaskSettings.java | 104 +++- .../services/elser/ElserMlNodeService.java | 9 +- .../huggingface/HuggingFaceBaseService.java | 2 + .../services/openai/OpenAiService.java | 2 + 
.../embeddings/OpenAiEmbeddingsModel.java | 18 +- .../OpenAiEmbeddingsTaskSettings.java | 24 +- .../xpack/inference/InputTypeTests.java | 21 + .../action/InferenceActionRequestTests.java | 82 ++- .../cohere/CohereActionCreatorTests.java | 2 +- .../cohere/CohereEmbeddingsActionTests.java | 9 + .../CohereEmbeddingsRequestEntityTests.java | 5 + .../cohere/CohereEmbeddingsRequestTests.java | 16 + .../services/SenderServiceTests.java | 2 + .../inference/services/ServiceUtilsTests.java | 49 +- .../services/cohere/CohereServiceTests.java | 194 ++++++- .../CohereEmbeddingsModelTests.java | 141 +++++- .../CohereEmbeddingsTaskSettingsTests.java | 60 ++- .../HuggingFaceBaseServiceTests.java | 3 +- .../huggingface/HuggingFaceServiceTests.java | 5 +- .../services/openai/OpenAiServiceTests.java | 7 +- .../OpenAiEmbeddingsModelTests.java | 6 +- .../OpenAiEmbeddingsTaskSettingsTests.java | 6 +- .../TransportGetOverallBucketsAction.java | 4 +- .../xpack/ml/aggs/MlAggsHelper.java | 8 +- .../changepoint/ChangePointAggregator.java | 4 +- .../BucketCorrelationAggregator.java | 4 +- .../InferencePipelineAggregator.java | 8 +- .../kstest/BucketCountKSTestAggregator.java | 4 +- .../AbstractAggregationDataExtractor.java | 13 +- .../AggregationToJsonProcessor.java | 15 +- .../CompositeAggregationDataExtractor.java | 10 +- .../chunked/ChunkedDataExtractor.java | 6 +- .../ExtractedFieldsDetectorFactory.java | 4 +- .../TrainTestSplitterFactory.java | 4 +- .../job/persistence/JobResultsProvider.java | 12 +- .../OverallBucketsProvider.java | 4 +- .../xpack/profiling/CostCalculator.java | 4 +- .../xpack/profiling/CostEntry.java | 29 +- .../xpack/profiling/HostMetadata.java | 12 +- .../main/resources/profiling-costs.json.gz | Bin 59986 -> 72998 bytes .../xpack/profiling/CO2CalculatorTests.java | 12 +- .../xpack/profiling/CostCalculatorTests.java | 6 +- .../rollup/RollupResponseTranslator.java | 10 +- .../xpack/rollup/job/IndexerUtils.java | 6 +- .../RollupResponseTranslationTests.java | 19 +- 
.../rollup/action/SearchActionTests.java | 11 +- .../xpack/security/operator/Constants.java | 1 + .../xpack/security/QueryApiKeyIT.java | 11 + .../xpack/security/apikey/ApiKeyRestIT.java | 52 ++ .../apikey/TransportQueryApiKeyAction.java | 25 +- .../support/ApiKeyBoolQueryBuilder.java | 49 +- .../TransportQueryApiKeyActionTests.java | 30 +- .../service/ElasticServiceAccountsTests.java | 1 + .../support/ApiKeyBoolQueryBuilderTests.java | 213 +++++++- .../xpack/spatial/common/ShapeUtils.java | 68 --- .../index/mapper/CartesianShapeIndexer.java | 15 +- .../index/query/ShapeQueryPointProcessor.java | 153 +----- .../index/query/ShapeQueryProcessor.java | 148 +----- .../CartesianShapeDocValuesQueryTests.java | 10 +- .../xpack/sql/execution/search/Querier.java | 9 +- .../continuous/HistogramGroupByIT.java | 2 - .../continuous/TermsGroupByIT.java | 2 - .../TransformUsageTransportAction.java | 4 +- .../common/AbstractCompositeAggFunction.java | 6 +- .../CompositeBucketsChangeCollector.java | 16 +- .../vectortile/rest/RestVectorTileAction.java | 6 +- .../HistoryTemplateEmailMappingsTests.java | 4 +- .../HistoryTemplateHttpMappingsTests.java | 4 +- ...storyTemplateIndexActionMappingsTests.java | 4 +- ...storyTemplateSearchInputMappingsTests.java | 4 +- 289 files changed, 5805 insertions(+), 1895 deletions(-) create mode 100644 docs/changelog/104594.yaml create mode 100644 docs/changelog/104625.yaml create mode 100644 docs/changelog/104753.yaml create mode 100644 docs/changelog/104832.yaml create mode 100644 docs/changelog/104891.yaml create mode 100644 docs/changelog/104904.yaml create mode 100644 docs/changelog/104909.yaml create mode 100644 docs/changelog/104911.yaml create mode 100644 docs/changelog/104927.yaml create mode 100644 docs/changelog/99961.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json create mode 100644 server/src/main/java/org/elasticsearch/action/ActionRequestLazyBuilder.java create mode 100644 
server/src/main/java/org/elasticsearch/common/geo/LuceneGeometriesUtils.java delete mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java create mode 100644 server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java create mode 100644 server/src/test/java/org/elasticsearch/common/geo/LuceneGeometriesUtilsTests.java create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSearchResult.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorsAPISearchResult.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequest.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponse.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestDeleteConnectorSecretAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobSearchResult.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretActionTests.java create mode 100644 
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretActionTests.java create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-all-types.json create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InputTypeTests.java delete mode 100644 x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/ShapeUtils.java diff --git a/TRACING.md b/TRACING.md index 82f9b0f52fd8b..5c754e383c8e5 100644 --- a/TRACING.md +++ b/TRACING.md @@ -23,18 +23,18 @@ You must supply configuration and credentials for the APM server (see below). In your `elasticsearch.yml` add the following configuration: ``` -tracing.apm.enabled: true +telemetry.tracing.enabled: true telemetry.agent.server_url: https://:443 ``` -When using a secret token to authenticate with the APM server, you must add it to the Elasticsearch keystore under `tracing.apm.secret_token`. For example, execute: +When using a secret token to authenticate with the APM server, you must add it to the Elasticsearch keystore under `telemetry.secret_token`. For example, execute: - bin/elasticsearch-keystore add tracing.apm.secret_token + bin/elasticsearch-keystore add telemetry.secret_token -then enter the token when prompted. If you are using API keys, change the keystore key name to `tracing.apm.api_key`. +then enter the token when prompted. If you are using API keys, change the keystore key name to `telemetry.api_key`. -All APM settings live under `tracing.apm`. All settings related to the Java agent -go under `telemetry.agent`. 
Anything you set under there will be propagated to +All APM settings live under `telemetry`. Tracing related settings go under `telemetry.tracing` and settings +related to the Java agent go under `telemetry.agent`. Anything you set under there will be propagated to the agent. For agent settings that can be changed dynamically, you can use the cluster diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index 746a09d242761..9216b538bd313 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java @@ -201,10 +201,10 @@ public void beforeStart() { try { mockServer.start(); node.setting("telemetry.metrics.enabled", "true"); - node.setting("tracing.apm.enabled", "true"); - node.setting("tracing.apm.agent.transaction_sample_rate", "0.10"); - node.setting("tracing.apm.agent.metrics_interval", "10s"); - node.setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockServer.getPort()); + node.setting("telemetry.tracing.enabled", "true"); + node.setting("telemetry.agent.transaction_sample_rate", "0.10"); + node.setting("telemetry.agent.metrics_interval", "10s"); + node.setting("telemetry.agent.server_url", "http://127.0.0.1:" + mockServer.getPort()); } catch (IOException e) { logger.warn("Unable to start APM server", e); } @@ -213,9 +213,10 @@ public void beforeStart() { // if metrics were not enabled explicitly for gradlew run we should disable them else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { // metrics node.setting("telemetry.metrics.enabled", "false"); - } else if (node.getSettingKeys().contains("tracing.apm.enabled") == false) { // tracing - node.setting("tracing.apm.enable", "false"); - } + } else if (node.getSettingKeys().contains("telemetry.tracing.enabled") == false + && 
node.getSettingKeys().contains("tracing.apm.enabled") == false) { // tracing + node.setting("telemetry.tracing.enable", "false"); + } } } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java index 6e337b0b61845..e8a8d3ee8df77 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java @@ -108,7 +108,6 @@ public void testExtractSecureSettings() { public void testExtractSettings() throws UserException { Function buildSettings = (prefix) -> Settings.builder() - .put("tracing.apm.enabled", true) .put(prefix + "server_url", "https://myurl:443") .put(prefix + "service_node_name", "instance-0000000001"); @@ -158,7 +157,6 @@ public void testExtractSettings() throws UserException { IllegalStateException.class, () -> APMJvmOptions.extractApmSettings( Settings.builder() - .put("tracing.apm.enabled", true) .put("tracing.apm.agent.server_url", "https://myurl:443") .put("telemetry.agent.server_url", "https://myurl-2:443") .build() diff --git a/docs/changelog/104594.yaml b/docs/changelog/104594.yaml new file mode 100644 index 0000000000000..7729eb028f68e --- /dev/null +++ b/docs/changelog/104594.yaml @@ -0,0 +1,5 @@ +pr: 104594 +summary: Support of `match` for the Query API Key API +area: Authentication +type: enhancement +issues: [] diff --git a/docs/changelog/104625.yaml b/docs/changelog/104625.yaml new file mode 100644 index 0000000000000..28951936107fb --- /dev/null +++ b/docs/changelog/104625.yaml @@ -0,0 +1,6 @@ +pr: 104625 +summary: "Add support for the `type` parameter, for sorting, to the Query API Key\ + \ API" +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/104753.yaml b/docs/changelog/104753.yaml new file mode 100644 index 
0000000000000..f95fd3da44084 --- /dev/null +++ b/docs/changelog/104753.yaml @@ -0,0 +1,5 @@ +pr: 104753 +summary: Upgrade to Lucene 9.9.2 +area: Search +type: upgrade +issues: [] diff --git a/docs/changelog/104832.yaml b/docs/changelog/104832.yaml new file mode 100644 index 0000000000000..89f837b1c3475 --- /dev/null +++ b/docs/changelog/104832.yaml @@ -0,0 +1,6 @@ +pr: 104832 +summary: Limit concurrent shards per node for ESQL +area: ES|QL +type: bug +issues: + - 103666 diff --git a/docs/changelog/104891.yaml b/docs/changelog/104891.yaml new file mode 100644 index 0000000000000..690f2c4b11f88 --- /dev/null +++ b/docs/changelog/104891.yaml @@ -0,0 +1,6 @@ +pr: 104891 +summary: "ESQL: Fix `SearchStats#count(String)` to count values not rows" +area: ES|QL +type: bug +issues: + - 104795 diff --git a/docs/changelog/104904.yaml b/docs/changelog/104904.yaml new file mode 100644 index 0000000000000..07e22feb144ed --- /dev/null +++ b/docs/changelog/104904.yaml @@ -0,0 +1,5 @@ +pr: 104904 +summary: Improve `CANNOT_REBALANCE_CAN_ALLOCATE` explanation +area: Allocation +type: bug +issues: [] diff --git a/docs/changelog/104909.yaml b/docs/changelog/104909.yaml new file mode 100644 index 0000000000000..6d250c22a745a --- /dev/null +++ b/docs/changelog/104909.yaml @@ -0,0 +1,5 @@ +pr: 104909 +summary: "[Connectors API] Relax strict response parsing for get/list operations" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/104911.yaml b/docs/changelog/104911.yaml new file mode 100644 index 0000000000000..17a335337e345 --- /dev/null +++ b/docs/changelog/104911.yaml @@ -0,0 +1,7 @@ +pr: 104911 +summary: "ES|QL: Improve type validation in aggs for UNSIGNED_LONG better support\ + \ for VERSION" +area: ES|QL +type: bug +issues: + - 102961 diff --git a/docs/changelog/104927.yaml b/docs/changelog/104927.yaml new file mode 100644 index 0000000000000..e0e098ba10b7b --- /dev/null +++ b/docs/changelog/104927.yaml @@ -0,0 +1,5 @@ +pr: 104927 +summary: Adding 
`ActionRequestLazyBuilder` implementation of `RequestBuilder` +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/99961.yaml b/docs/changelog/99961.yaml new file mode 100644 index 0000000000000..457f7801ce218 --- /dev/null +++ b/docs/changelog/99961.yaml @@ -0,0 +1,6 @@ +pr: 99961 +summary: "ESQL: Correct out-of-range filter pushdowns" +area: ES|QL +type: bug +issues: + - 99960 diff --git a/docs/reference/esql/functions/avg.asciidoc b/docs/reference/esql/functions/avg.asciidoc index 9a6f5a82d1959..7eadff29f1bfc 100644 --- a/docs/reference/esql/functions/avg.asciidoc +++ b/docs/reference/esql/functions/avg.asciidoc @@ -10,7 +10,9 @@ AVG(expression) ---- `expression`:: -Numeric expression. If `null`, the function returns `null`. +Numeric expression. +//If `null`, the function returns `null`. +// TODO: Remove comment when https://github.com/elastic/elasticsearch/issues/104900 is fixed. *Description* @@ -20,7 +22,7 @@ The average of a numeric expression. The result is always a `double` no matter the input type. -*Example* +*Examples* [source.merge.styled,esql] ---- @@ -30,3 +32,16 @@ include::{esql-specs}/stats.csv-spec[tag=avg] |=== include::{esql-specs}/stats.csv-spec[tag=avg-result] |=== + +The expression can use inline functions. 
For example, to calculate the average +over a multivalued column, first use `MV_AVG` to average the multiple values per +row, and use the result with the `AVG` function: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsAvgNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsAvgNestedExpression-result] +|=== diff --git a/docs/reference/esql/functions/count-distinct.asciidoc b/docs/reference/esql/functions/count-distinct.asciidoc index 04a200935cd48..a9f30d24e0e83 100644 --- a/docs/reference/esql/functions/count-distinct.asciidoc +++ b/docs/reference/esql/functions/count-distinct.asciidoc @@ -6,13 +6,13 @@ [source,esql] ---- -COUNT_DISTINCT(column[, precision_threshold]) +COUNT_DISTINCT(expression[, precision_threshold]) ---- *Parameters* -`column`:: -Column for which to count the number of distinct values. +`expression`:: +Expression that outputs the values on which to perform a distinct count. `precision_threshold`:: Precision threshold. Refer to <>. The @@ -23,29 +23,6 @@ same effect as a threshold of 40000. The default value is 3000. Returns the approximate number of distinct values. -[discrete] -[[esql-agg-count-distinct-approximate]] -==== Counts are approximate - -Computing exact counts requires loading values into a set and returning its -size. This doesn't scale when working on high-cardinality sets and/or large -values as the required memory usage and the need to communicate those -per-shard sets between nodes would utilize too many resources of the cluster. 
- -This `COUNT_DISTINCT` function is based on the -https://static.googleusercontent.com/media/research.google.com/fr//pubs/archive/40671.pdf[HyperLogLog++] -algorithm, which counts based on the hashes of the values with some interesting -properties: - -include::../../aggregations/metrics/cardinality-aggregation.asciidoc[tag=explanation] - -The `COUNT_DISTINCT` function takes an optional second parameter to configure -the precision threshold. The precision_threshold options allows to trade memory -for accuracy, and defines a unique count below which counts are expected to be -close to accurate. Above this value, counts might become a bit more fuzzy. The -maximum supported value is 40000, thresholds above this number will have the -same effect as a threshold of 40000. The default value is `3000`. - *Supported types* Can take any field type as input. @@ -71,3 +48,38 @@ include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-precision |=== include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-precision-result] |=== + +The expression can use inline functions. This example splits a string into +multiple values using the `SPLIT` function and counts the unique values: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_count_distinct.csv-spec[tag=docsCountDistinctWithExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_count_distinct.csv-spec[tag=docsCountDistinctWithExpression-result] +|=== + +[discrete] +[[esql-agg-count-distinct-approximate]] +==== Counts are approximate + +Computing exact counts requires loading values into a set and returning its +size. This doesn't scale when working on high-cardinality sets and/or large +values as the required memory usage and the need to communicate those +per-shard sets between nodes would utilize too many resources of the cluster. 
+ +This `COUNT_DISTINCT` function is based on the +https://static.googleusercontent.com/media/research.google.com/fr//pubs/archive/40671.pdf[HyperLogLog++] +algorithm, which counts based on the hashes of the values with some interesting +properties: + +include::../../aggregations/metrics/cardinality-aggregation.asciidoc[tag=explanation] + +The `COUNT_DISTINCT` function takes an optional second parameter to configure +the precision threshold. The precision_threshold option allows to trade memory +for accuracy, and defines a unique count below which counts are expected to be +close to accurate. Above this value, counts might become a bit more fuzzy. The +maximum supported value is 40000, thresholds above this number will have the +same effect as a threshold of 40000. The default value is `3000`. \ No newline at end of file diff --git a/docs/reference/esql/functions/count.asciidoc b/docs/reference/esql/functions/count.asciidoc index 70b13d7fc16b3..38732336413ad 100644 --- a/docs/reference/esql/functions/count.asciidoc +++ b/docs/reference/esql/functions/count.asciidoc @@ -6,14 +6,14 @@ [source,esql] ---- -COUNT([input]) +COUNT([expression]) ---- *Parameters* -`input`:: -Column or literal for which to count the number of values. If omitted, returns a -count all (the number of rows). +`expression`:: +Expression that outputs values to be counted. +If omitted, equivalent to `COUNT(*)` (the number of rows). *Description* @@ -44,3 +44,15 @@ include::{esql-specs}/docs.csv-spec[tag=countAll] |=== include::{esql-specs}/docs.csv-spec[tag=countAll-result] |=== + +The expression can use inline functions. 
This example splits a string into +multiple values using the `SPLIT` function and counts the values: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsCountWithExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsCountWithExpression-result] +|=== diff --git a/docs/reference/esql/functions/max.asciidoc b/docs/reference/esql/functions/max.asciidoc index 4bc62de341d9d..f2e0d0a0205b3 100644 --- a/docs/reference/esql/functions/max.asciidoc +++ b/docs/reference/esql/functions/max.asciidoc @@ -6,17 +6,17 @@ [source,esql] ---- -MAX(column) +MAX(expression) ---- *Parameters* -`column`:: -Column from which to return the maximum value. +`expression`:: +Expression from which to return the maximum value. *Description* -Returns the maximum value of a numeric column. +Returns the maximum value of a numeric expression. *Example* @@ -28,3 +28,16 @@ include::{esql-specs}/stats.csv-spec[tag=max] |=== include::{esql-specs}/stats.csv-spec[tag=max-result] |=== + +The expression can use inline functions. 
For example, to calculate the maximum +over an average of a multivalued column, use `MV_AVG` to first average the +multiple values per row, and use the result with the `MAX` function: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsMaxNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsMaxNestedExpression-result] +|=== \ No newline at end of file diff --git a/docs/reference/esql/functions/median-absolute-deviation.asciidoc b/docs/reference/esql/functions/median-absolute-deviation.asciidoc index 301d344489643..796e0797157de 100644 --- a/docs/reference/esql/functions/median-absolute-deviation.asciidoc +++ b/docs/reference/esql/functions/median-absolute-deviation.asciidoc @@ -6,13 +6,13 @@ [source,esql] ---- -MEDIAN_ABSOLUTE_DEVIATION(column) +MEDIAN_ABSOLUTE_DEVIATION(expression) ---- *Parameters* -`column`:: -Column from which to return the median absolute deviation. +`expression`:: +Expression from which to return the median absolute deviation. *Description* @@ -44,3 +44,17 @@ include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation] |=== include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation-result] |=== + +The expression can use inline functions. 
For example, to calculate the +median absolute deviation of the maximum values of a multivalued column, first +use `MV_MAX` to get the maximum value per row, and use the result with the +`MEDIAN_ABSOLUTE_DEVIATION` function: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMADNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMADNestedExpression-result] +|=== diff --git a/docs/reference/esql/functions/median.asciidoc b/docs/reference/esql/functions/median.asciidoc index 17b51d9c50b26..ef845aafd3915 100644 --- a/docs/reference/esql/functions/median.asciidoc +++ b/docs/reference/esql/functions/median.asciidoc @@ -6,13 +6,13 @@ [source,esql] ---- -MEDIAN(column) +MEDIAN(expression) ---- *Parameters* -`column`:: -Column from which to return the median value. +`expression`:: +Expression from which to return the median value. *Description* @@ -37,3 +37,16 @@ include::{esql-specs}/stats_percentile.csv-spec[tag=median] |=== include::{esql-specs}/stats_percentile.csv-spec[tag=median-result] |=== + +The expression can use inline functions. 
For example, to calculate the median of +the maximum values of a multivalued column, first use `MV_MAX` to get the +maximum value per row, and use the result with the `MEDIAN` function: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMedianNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMedianNestedExpression-result] +|=== diff --git a/docs/reference/esql/functions/min.asciidoc b/docs/reference/esql/functions/min.asciidoc index b95efbbc6b3a5..313822818128c 100644 --- a/docs/reference/esql/functions/min.asciidoc +++ b/docs/reference/esql/functions/min.asciidoc @@ -6,17 +6,17 @@ [source,esql] ---- -MIN(column) +MIN(expression) ---- *Parameters* -`column`:: -Column from which to return the minimum value. +`expression`:: +Expression from which to return the minimum value. *Description* -Returns the minimum value of a numeric column. +Returns the minimum value of a numeric expression. *Example* @@ -28,3 +28,16 @@ include::{esql-specs}/stats.csv-spec[tag=min] |=== include::{esql-specs}/stats.csv-spec[tag=min-result] |=== + +The expression can use inline functions. 
For example, to calculate the minimum +over an average of a multivalued column, use `MV_AVG` to first average the +multiple values per row, and use the result with the `MIN` function: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsMinNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsMinNestedExpression-result] +|=== diff --git a/docs/reference/esql/functions/percentile.asciidoc b/docs/reference/esql/functions/percentile.asciidoc index ab3f14af70486..e00ee436c31cf 100644 --- a/docs/reference/esql/functions/percentile.asciidoc +++ b/docs/reference/esql/functions/percentile.asciidoc @@ -6,13 +6,13 @@ [source,esql] ---- -PERCENTILE(column, percentile) +PERCENTILE(expression, percentile) ---- *Parameters* -`column`:: -Column to convert from multiple values to single value. +`expression`:: +Expression from which to return a percentile. `percentile`:: A constant numeric expression. @@ -23,18 +23,6 @@ Returns the value at which a certain percentage of observed values occur. For example, the 95th percentile is the value which is greater than 95% of the observed values and the 50th percentile is the <>. -[discrete] -[[esql-agg-percentile-approximate]] -==== `PERCENTILE` is (usually) approximate - -include::../../aggregations/metrics/percentile-aggregation.asciidoc[tag=approximate] - -[WARNING] -==== -`PERCENTILE` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. -This means you can get slightly different results using the same data. -==== - *Example* [source.merge.styled,esql] @@ -45,3 +33,28 @@ include::{esql-specs}/stats_percentile.csv-spec[tag=percentile] |=== include::{esql-specs}/stats_percentile.csv-spec[tag=percentile-result] |=== + +The expression can use inline functions. 
For example, to calculate a percentile +of the maximum values of a multivalued column, first use `MV_MAX` to get the +maximum value per row, and use the result with the `PERCENTILE` function: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsPercentileNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsPercentileNestedExpression-result] +|=== + +[discrete] +[[esql-agg-percentile-approximate]] +==== `PERCENTILE` is (usually) approximate + +include::../../aggregations/metrics/percentile-aggregation.asciidoc[tag=approximate] + +[WARNING] +==== +`PERCENTILE` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. +This means you can get slightly different results using the same data. +==== \ No newline at end of file diff --git a/docs/reference/esql/functions/sum.asciidoc b/docs/reference/esql/functions/sum.asciidoc index e88ebbeb3c771..efe65d5503ec6 100644 --- a/docs/reference/esql/functions/sum.asciidoc +++ b/docs/reference/esql/functions/sum.asciidoc @@ -6,15 +6,15 @@ [source,esql] ---- -SUM(column) +SUM(expression) ---- -`column`:: -Numeric column. +`expression`:: +Numeric expression. *Description* -Returns the sum of a numeric column. +Returns the sum of a numeric expression. *Example* @@ -26,3 +26,16 @@ include::{esql-specs}/stats.csv-spec[tag=sum] |=== include::{esql-specs}/stats.csv-spec[tag=sum-result] |=== + +The expression can use inline functions. 
For example, to calculate +the sum of each employee's maximum salary changes, apply the +`MV_MAX` function to each row and then sum the results: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsSumNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsSumNestedExpression-result] +|=== diff --git a/docs/reference/esql/processing-commands/stats.asciidoc b/docs/reference/esql/processing-commands/stats.asciidoc index a34bc444578d6..fe84c56bbfc19 100644 --- a/docs/reference/esql/processing-commands/stats.asciidoc +++ b/docs/reference/esql/processing-commands/stats.asciidoc @@ -6,7 +6,8 @@ [source,esql] ---- -STATS [column1 =] expression1[, ..., [columnN =] expressionN] [BY grouping_column1[, ..., grouping_columnN]] +STATS [column1 =] expression1[, ..., [columnN =] expressionN] +[BY grouping_expression1[, ..., grouping_expressionN]] ---- *Parameters* @@ -18,8 +19,10 @@ equal to the corresponding expression (`expressionX`). `expressionX`:: An expression that computes an aggregated value. -`grouping_columnX`:: -The column containing the values to group by. +`grouping_expressionX`:: +An expression that outputs the values to group by. + +NOTE: Individual `null` values are skipped when computing aggregations. *Description* @@ -28,14 +31,14 @@ and calculate one or more aggregated values over the grouped rows. If `BY` is omitted, the output table contains exactly one row with the aggregations applied over the entire dataset. -The following aggregation functions are supported: +The following <> are supported: include::../functions/aggregation-functions.asciidoc[tag=agg_list] NOTE: `STATS` without any groups is much much faster than adding a group. -NOTE: Grouping on a single column is currently much more optimized than grouping - on many columns. 
In some tests we have seen grouping on a single `keyword` +NOTE: Grouping on a single expression is currently much more optimized than grouping + on many expressions. In some tests we have seen grouping on a single `keyword` column to be five times faster than grouping on two `keyword` columns. Do not try to work around this by combining the two columns together with something like <> and then grouping - that is not going to be @@ -68,10 +71,14 @@ include::{esql-specs}/stats.csv-spec[tag=statsWithoutBy-result] It's possible to calculate multiple values: -[source,esql] +[source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues] ---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues-result] +|=== It's also possible to group by multiple values (only supported for long and keyword family fields): @@ -81,6 +88,33 @@ keyword family fields): include::{esql-specs}/stats.csv-spec[tag=statsGroupByMultipleValues] ---- +Both the aggregating functions and the grouping expressions accept other +functions. This is useful for using `STATS...BY` on multivalue columns. 
+For example, to calculate the average salary change, you can use `MV_AVG` to +first average the multiple values per employee, and use the result with the +`AVG` function: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsAvgNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsAvgNestedExpression-result] +|=== + +An example of grouping by an expression is grouping employees on the first +letter of their last name: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsByExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsByExpression-result] +|=== + Specifying the output column name is optional. If not specified, the new column name is equal to the expression. The following query returns a column named `AVG(salary)`: diff --git a/docs/reference/release-notes/8.12.0.asciidoc b/docs/reference/release-notes/8.12.0.asciidoc index 267f00192ecdc..4c0fc50584b9f 100644 --- a/docs/reference/release-notes/8.12.0.asciidoc +++ b/docs/reference/release-notes/8.12.0.asciidoc @@ -12,6 +12,8 @@ Also see <>. When using `int8_hnsw` and the default `confidence_interval` (or any `confidence_interval` less than `1.0`) and when there are deleted documents in the segments, quantiles may fail to build and prevent merging. +This issue is fixed in 8.12.1. 
+ [[breaking-8.12.0]] [float] === Breaking changes diff --git a/docs/reference/rest-api/security.asciidoc b/docs/reference/rest-api/security.asciidoc index aedd65de76e5d..94b632490ad86 100644 --- a/docs/reference/rest-api/security.asciidoc +++ b/docs/reference/rest-api/security.asciidoc @@ -188,6 +188,7 @@ include::security/get-role-mappings.asciidoc[] include::security/get-roles.asciidoc[] include::security/get-service-accounts.asciidoc[] include::security/get-service-credentials.asciidoc[] +include::security/get-settings.asciidoc[] include::security/get-tokens.asciidoc[] include::security/get-user-privileges.asciidoc[] @@ -202,6 +203,7 @@ include::security/oidc-logout-api.asciidoc[] include::security/query-api-key.asciidoc[] include::security/query-user.asciidoc[] include::security/update-api-key.asciidoc[] +include::security/update-settings.asciidoc[] include::security/bulk-update-api-keys.asciidoc[] include::security/saml-prepare-authentication-api.asciidoc[] include::security/saml-authenticate-api.asciidoc[] diff --git a/docs/reference/rest-api/security/get-settings.asciidoc b/docs/reference/rest-api/security/get-settings.asciidoc index d402c74b5c46b..5c38b96903cbd 100644 --- a/docs/reference/rest-api/security/get-settings.asciidoc +++ b/docs/reference/rest-api/security/get-settings.asciidoc @@ -5,17 +5,21 @@ Get Security settings ++++ +[[security-api-get-settings-prereqs]] ==== {api-prereq-title} * To use this API, you must have at least the `read_security` cluster privilege. +[[security-api-get-settings-desc]] ==== {api-description-title} -This API allows a user to retrieve the user-configurable settings for the Security internal index (`.security` and associated indices). Only a subset of the index settings — those that are user-configurable—will be shown. This includes: +This API allows a user to retrieve the user-configurable settings for the +Security internal index (`.security` and associated indices). 
Only a subset of +the index settings — those that are user-configurable—will be shown. This includes: - `index.auto_expand_replicas` - `index.number_of_replicas` -An example of retrieving the Security settings: +An example of retrieving the security settings: [source,console] ----------------------------------------------------------- @@ -24,4 +28,5 @@ GET /_security/settings // TEST[setup:user_profiles] // TEST[setup:service_token42] -The configurable settings can be modified using the <> API. +The configurable settings can be modified using the +<> API. diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc index a08a8fd1858b6..394464dc21456 100644 --- a/docs/reference/rest-api/security/query-api-key.asciidoc +++ b/docs/reference/rest-api/security/query-api-key.asciidoc @@ -52,12 +52,20 @@ You can specify the following parameters in the request body: (Optional, string) A <> to filter which API keys to return. The query supports a subset of query types, including <>, <>, -<>, <>, <>, -<>, <>, <>, -<>, and <> +<>, <>, +<>, <>, +<>, <>, +<>, <>, +and <> + You can query the following public values associated with an API key. + +NOTE: The queryable string values associated with API keys are internally mapped as <>. +Consequently, if no <> parameter is specified for a +<> query, then the provided match query string is interpreted as +a single keyword value. Such a <> query is hence equivalent to a +<> query. 
++ .Valid values for `query` [%collapsible%open] ==== diff --git a/docs/reference/rest-api/security/update-settings.asciidoc b/docs/reference/rest-api/security/update-settings.asciidoc index 525b297123c31..0ea41d86e85ed 100644 --- a/docs/reference/rest-api/security/update-settings.asciidoc +++ b/docs/reference/rest-api/security/update-settings.asciidoc @@ -5,12 +5,16 @@ Update Security settings ++++ +[[security-api-update-settings-prereqs]] ==== {api-prereq-title} * To use this API, you must have at least the `manage_security` cluster privilege. +[[security-api-update-settings-desc]] ==== {api-description-title} -This API allows a user to modify the settings for the Security internal indices (`.security` and associated indices). Only a subset of settings are allowed to be modified. This includes: +This API allows a user to modify the settings for the Security internal indices +(`.security` and associated indices). Only a subset of settings are allowed to +be modified. This includes: - `index.auto_expand_replicas` - `index.number_of_replicas` @@ -34,17 +38,23 @@ PUT /_security/settings ----------------------------------------------------------- // TEST[skip:making sure all the indices have been created reliably is difficult] -The configured settings can be retrieved using the <> API. If a -given index is not in use on the system, but settings are provided for it, the request will be rejected - this API does -not yet support configuring the settings for these indices before they are in use. +The configured settings can be retrieved using the +<> API. If a given index +is not in use on the system, but settings are provided for it, the request will +be rejected - this API does not yet support configuring the settings for these +indices before they are in use. + ==== {api-request-body-title} + `security`:: -(Optional, object) Settings to be used for the index used for most security configuration, including Native realm users -and roles configured via the API. 
+(Optional, object) Settings to be used for the index used for most security +configuration, including Native realm users and roles configured via the API. `security-tokens`:: -(Optional, object) Settings to be used for the index used to store <>. +(Optional, object) Settings to be used for the index used to store +<>. `security`:: -(Optional, object) Settings to be used for the index used to store <> information. +(Optional, object) Settings to be used for the index used to store +<> information. diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index 7fbdecc0aebce..249fddce9c416 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -4,9 +4,9 @@ Semantic search with the {infer} API ++++ -The instructions in this tutorial shows you how to use the {infer} API with the -Open AI service to perform semantic search on your data. The following example -uses OpenAI's `text-embedding-ada-002` second generation embedding model. You +The instructions in this tutorial shows you how to use the {infer} API with the +Open AI service to perform semantic search on your data. The following example +uses OpenAI's `text-embedding-ada-002` second generation embedding model. You can use any OpenAI models, they are all supported by the {infer} API. @@ -14,8 +14,8 @@ can use any OpenAI models, they are all supported by the {infer} API. [[infer-openai-requirements]] ==== Requirements -An https://openai.com/[OpenAI account] is required to use the {infer} API with -the OpenAI service. +An https://openai.com/[OpenAI account] is required to use the {infer} API with +the OpenAI service. 
[discrete] @@ -39,13 +39,13 @@ PUT _inference/text_embedding/openai_embeddings <1> ------------------------------------------------------------ // TEST[skip:TBD] <1> The task type is `text_embedding` in the path. -<2> The API key of your OpenAI account. You can find your OpenAI API keys in -your OpenAI account under the -https://platform.openai.com/api-keys[API keys section]. You need to provide -your API key only once. The <> does not return your API +<2> The API key of your OpenAI account. You can find your OpenAI API keys in +your OpenAI account under the +https://platform.openai.com/api-keys[API keys section]. You need to provide +your API key only once. The <> does not return your API key. -<3> The name of the embedding model to use. You can find the list of OpenAI -embedding models +<3> The name of the embedding model to use. You can find the list of OpenAI +embedding models https://platform.openai.com/docs/guides/embeddings/embedding-models[here]. @@ -53,9 +53,9 @@ https://platform.openai.com/docs/guides/embeddings/embedding-models[here]. [[infer-openai-mappings]] ==== Create the index mapping -The mapping of the destination index - the index that contains the embeddings -that the model will create based on your input text - must be created. The -destination index must have a field with the <> +The mapping of the destination index - the index that contains the embeddings +that the model will create based on your input text - must be created. The +destination index must have a field with the <> field type to index the output of the OpenAI model. [source,console] @@ -67,7 +67,7 @@ PUT openai-embeddings "content_embedding": { <1> "type": "dense_vector", <2> "dims": 1536, <3> - "element_type": "byte", + "element_type": "float", "similarity": "dot_product" <4> }, "content": { <5> @@ -80,15 +80,15 @@ PUT openai-embeddings <1> The name of the field to contain the generated tokens. It must be refrenced in the {infer} pipeline configuration in the next step. 
<2> The field to contain the tokens is a `dense_vector` field. -<3> The output dimensions of the model. Find this value in the -https://platform.openai.com/docs/guides/embeddings/embedding-models[OpenAI documentation] +<3> The output dimensions of the model. Find this value in the +https://platform.openai.com/docs/guides/embeddings/embedding-models[OpenAI documentation] of the model you use. -<4> The faster` dot_product` function can be used to calculate similarity -because OpenAI embeddings are normalised to unit length. You can check the +<4> The faster` dot_product` function can be used to calculate similarity +because OpenAI embeddings are normalised to unit length. You can check the https://platform.openai.com/docs/guides/embeddings/which-distance-function-should-i-use[OpenAI docs] -about which similarity function to use. +about which similarity function to use. <5> The name of the field from which to create the sparse vector representation. -In this example, the name of the field is `content`. It must be referenced in +In this example, the name of the field is `content`. It must be referenced in the {infer} pipeline configuration in the next step. <6> The field type which is text in this example. @@ -98,8 +98,8 @@ the {infer} pipeline configuration in the next step. ==== Create an ingest pipeline with an inference processor Create an <> with an -<> and use the OpenAI model you created -above to infer against the data that is being ingested in the +<> and use the OpenAI model you created +above to infer against the data that is being ingested in the pipeline. [source,console] @@ -119,8 +119,8 @@ PUT _ingest/pipeline/openai_embeddings ] } -------------------------------------------------- -<1> The name of the inference model you created by using the -<>. +<1> The name of the inference model you created by using the +<>. <2> Configuration object that defines the `input_field` for the {infer} process and the `output_field` that will contain the {infer} results. 
@@ -179,9 +179,9 @@ POST _reindex?wait_for_completion=false number makes the update of the reindexing process quicker which enables you to follow the progress closely and detect errors early. -NOTE: The -https://platform.openai.com/account/limits[rate limit of your OpenAI account] -may affect the throughput of the reindexing process. If this happens, change +NOTE: The +https://platform.openai.com/account/limits[rate limit of your OpenAI account] +may affect the throughput of the reindexing process. If this happens, change `size` to `3` or a similar value in magnitude. The call returns a task ID to monitor the progress: @@ -192,7 +192,7 @@ GET _tasks/ ---- // TEST[skip:TBD] -You can also cancel the reindexing process if you don't want to wait until the +You can also cancel the reindexing process if you don't want to wait until the reindexing process is fully complete which might take hours: [source,console] @@ -206,12 +206,12 @@ POST _tasks//_cancel [[infer-semantic-search]] ==== Semantic search -After the dataset has been enriched with the embeddings, you can query the data +After the dataset has been enriched with the embeddings, you can query the data using {ref}/knn-search.html#knn-semantic-search[semantic search]. Pass a `query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and provide the query text and the model you have used to create the embeddings. -NOTE: If you cancelled the reindexing process, you run the query only a part of +NOTE: If you cancelled the reindexing process, you run the query only a part of the data which affects the quality of your results. 
[source,console] @@ -237,7 +237,7 @@ GET openai-embeddings/_search -------------------------------------------------- // TEST[skip:TBD] -As a result, you receive the top 10 documents that are closest in meaning to the +As a result, you receive the top 10 documents that are closest in meaning to the query from the `openai-embeddings` index sorted by their proximity to the query: [source,consol-result] diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java index 56fff226114f8..242da6fd705dd 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java @@ -273,7 +273,7 @@ public static XContentType fromMediaType(String mediaTypeHeaderValue) throws Ill return null; } - private int index; + private final int index; XContentType(int index) { this.index = index; @@ -315,4 +315,10 @@ public ParsedMediaType toParsedMediaType() { public XContentType canonical() { return this; } + + private static final XContentType[] values = values(); + + public static XContentType ofOrdinal(int ordinal) { + return values[ordinal]; + } } diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java index 2050ce20b1aee..917d8f0b80f2c 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java @@ -22,8 +22,8 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; -import 
org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.global.Global; @@ -177,7 +177,7 @@ public void setupSuiteScopeCluster() throws Exception { public void testStandAloneTimeSeriesAgg() { assertNoFailuresAndResponse(prepareSearch("index").setSize(0).addAggregation(timeSeries("by_ts")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalTimeSeries timeSeries = aggregations.get("by_ts"); assertThat( @@ -203,7 +203,7 @@ public void testTimeSeriesGroupedByADimension() { .subAggregation(timeSeries("by_ts")) ), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); Terms terms = aggregations.get("by_dim"); Set> keys = new HashSet<>(); @@ -236,7 +236,7 @@ public void testTimeSeriesGroupedByDateHistogram() { .subAggregation(timeSeries("by_ts").subAggregation(stats("timestamp").field("@timestamp"))) ), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); Histogram histogram = aggregations.get("by_time"); Map, Long> keys = new HashMap<>(); @@ -275,7 +275,7 @@ public void testStandAloneTimeSeriesAggWithDimFilter() { assertNoFailuresAndResponse( prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); 
InternalTimeSeries timeSeries = aggregations.get("by_ts"); Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); @@ -308,7 +308,7 @@ public void testStandAloneTimeSeriesAggWithGlobalAggregation() { .addAggregation(global("everything").subAggregation(sum("all_sum").field("metric_" + metric))) .addAggregation(PipelineAggregatorBuilders.sumBucket("total_filter_sum", "by_ts>filter_sum")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalTimeSeries timeSeries = aggregations.get("by_ts"); Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); @@ -353,7 +353,7 @@ public void testStandAloneTimeSeriesAggWithMetricFilter() { assertNoFailuresAndResponse( prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalTimeSeries timeSeries = aggregations.get("by_ts"); Map, Map>> filteredData = dataFilteredByMetric( diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java index 89d445903f8cc..91aba020b8856 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java @@ -69,7 +69,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe xDiff = (thisBucketKey.doubleValue() - lastBucketKey.doubleValue()) / xAxisUnits; } final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> 
(InternalAggregation) p) .collect(Collectors.toCollection(ArrayList::new)); aggs.add(new Derivative(name(), gradient, xDiff, formatter, metadata())); Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); diff --git a/modules/apm/METERING.md b/modules/apm/METERING.md index 49b365e135e2b..5347d2647a9ae 100644 --- a/modules/apm/METERING.md +++ b/modules/apm/METERING.md @@ -106,7 +106,7 @@ rootProject { afterEvaluate { testClusters.matching { it.name == "runTask" }.configureEach { setting 'xpack.security.audit.enabled', 'true' - keystore 'tracing.apm.secret_token', 'TODO-REPLACE' + keystore 'telemetry.secret_token', 'TODO-REPLACE' setting 'telemetry.metrics.enabled', 'true' setting 'telemetry.agent.server_url', 'https://TODO-REPLACE-URL.apm.eastus2.staging.azure.foundit.no:443' } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index d3ec2e2984013..bf3f01bd2052f 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -33,7 +33,7 @@ * programmatically attach the agent, the Security Manager permissions required for this * make this approach difficult to the point of impossibility. *

- * All settings are found under the tracing.apm. prefix. Any setting under + * All settings are found under the telemetry. prefix. Any setting under * the telemetry.agent. prefix will be forwarded on to the APM Java agent * by setting appropriate system properties. Some settings can only be set once, and must be * set when the agent starts. We therefore also create and configure a config file in @@ -64,14 +64,14 @@ public TelemetryProvider getTelemetryProvider(Settings settings) { @Override public Collection createComponents(PluginServices services) { final APMTracer apmTracer = telemetryProvider.get().getTracer(); + final APMMeterService apmMeter = telemetryProvider.get().getMeterService(); apmTracer.setClusterName(services.clusterService().getClusterName().value()); apmTracer.setNodeName(services.clusterService().getNodeName()); final APMAgentSettings apmAgentSettings = new APMAgentSettings(); - apmAgentSettings.syncAgentSystemProperties(settings); - final APMMeterService apmMeter = new APMMeterService(settings); - apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get(), apmMeter); + apmAgentSettings.initAgentSystemProperties(settings); + apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get()); logger.info("Sending apm metrics is {}", APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); logger.info("Sending apm tracing is {}", APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.get(settings) ? 
"enabled" : "disabled"); diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 3eba5bc98aaf5..88359d32a628c 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -37,23 +37,25 @@ public class APMAgentSettings { private static final Logger LOGGER = LogManager.getLogger(APMAgentSettings.class); - public void addClusterSettingsListeners( - ClusterService clusterService, - APMTelemetryProvider apmTelemetryProvider, - APMMeterService apmMeterService - ) { + public void addClusterSettingsListeners(ClusterService clusterService, APMTelemetryProvider apmTelemetryProvider) { final ClusterSettings clusterSettings = clusterService.getClusterSettings(); final APMTracer apmTracer = apmTelemetryProvider.getTracer(); + final APMMeterService apmMeterService = apmTelemetryProvider.getMeterService(); clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_ENABLED_SETTING, enabled -> { apmTracer.setEnabled(enabled); this.setAgentSetting("instrument", Boolean.toString(enabled)); + // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to + // minimise its impact to a running Elasticsearch. + boolean recording = enabled || clusterSettings.get(TELEMETRY_METRICS_ENABLED_SETTING); + this.setAgentSetting("recording", Boolean.toString(recording)); }); clusterSettings.addSettingsUpdateConsumer(TELEMETRY_METRICS_ENABLED_SETTING, enabled -> { apmMeterService.setEnabled(enabled); // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to // minimise its impact to a running Elasticsearch. 
- this.setAgentSetting("recording", Boolean.toString(enabled)); + boolean recording = enabled || clusterSettings.get(TELEMETRY_TRACING_ENABLED_SETTING); + this.setAgentSetting("recording", Boolean.toString(recording)); }); clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_NAMES_INCLUDE_SETTING, apmTracer::setIncludeNames); clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING, apmTracer::setExcludeNames); @@ -62,11 +64,16 @@ public void addClusterSettingsListeners( } /** - * Copies APM settings from the provided settings object into the corresponding system properties. + * Initialize APM settings from the provided settings object into the corresponding system properties. + * Later updates to these settings are synchronized using update consumers. * @param settings the settings to apply */ - public void syncAgentSystemProperties(Settings settings) { - this.setAgentSetting("recording", Boolean.toString(TELEMETRY_TRACING_ENABLED_SETTING.get(settings))); + public void initAgentSystemProperties(Settings settings) { + boolean tracing = TELEMETRY_TRACING_ENABLED_SETTING.get(settings); + boolean metrics = TELEMETRY_METRICS_ENABLED_SETTING.get(settings); + + this.setAgentSetting("recording", Boolean.toString(tracing || metrics)); + this.setAgentSetting("instrument", Boolean.toString(tracing)); // Apply values from the settings in the cluster state APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting); } @@ -113,7 +120,7 @@ public void setAgentSetting(String key, String value) { // Core: // forbid 'enabled', must remain enabled to dynamically enable tracing / metrics - // forbid 'recording' / 'instrument', controlled by 'telemetry.metrics.enabled' / 'tracing.apm.enabled' + // forbid 'recording' / 'instrument', controlled by 'telemetry.metrics.enabled' / 'telemetry.tracing.enabled' "service_name", "service_node_name", // forbid 'service_version', forced by APMJvmOptions @@ -200,8 +207,8 @@ public void setAgentSetting(String 
key, String value) { "profiling_inferred_spans_lib_directory", // Reporter: - // forbid secret_token: use tracing.apm.secret_token instead - // forbid api_key: use tracing.apm.api_key instead + // forbid secret_token: use telemetry.secret_token instead + // forbid api_key: use telemetry.api_key instead "server_url", "server_urls", "disable_send", diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java index 21f0b8491f644..ae1204e75af1a 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java @@ -49,7 +49,7 @@ public APMMeterRegistry getMeterRegistry() { } /** - * @see APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider, APMMeterService) + * @see APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider) */ void setEnabled(boolean enabled) { this.enabled = enabled; diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java index 5b78c2f5f6a3c..d7b061b4b0d19 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java @@ -14,12 +14,10 @@ import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; public class APMTelemetryProvider implements TelemetryProvider { - private final Settings settings; private final APMTracer apmTracer; private final APMMeterService apmMeterService; public APMTelemetryProvider(Settings settings) { - this.settings = settings; apmTracer = new APMTracer(settings); apmMeterService = new APMMeterService(settings); } @@ -29,6 +27,10 @@ 
public APMTracer getTracer() { return apmTracer; } + public APMMeterService getMeterService() { + return apmMeterService; + } + @Override public APMMeterRegistry getMeterRegistry() { return apmMeterService.getMeterRegistry(); diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java index 52607a79fe69d..d7ae93aded3de 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java @@ -8,81 +8,190 @@ package org.elasticsearch.telemetry.apm.internal; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import org.mockito.Mockito; import java.util.List; - +import java.util.Set; + +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.APM_AGENT_SETTINGS; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_API_KEY_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_SECRET_TOKEN_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES; 
+import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_API_KEY_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_ENABLED_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES; +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItem; +import static org.mockito.Mockito.clearInvocations; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class APMAgentSettingsTests extends ESTestCase { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + APMTelemetryProvider apmTelemetryProvider = mock(Mockito.RETURNS_DEEP_STUBS); /** * Check that when the tracer is enabled, it also sets the APM agent's recording system property to true. 
*/ public void testEnableTracing() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); - apmAgentSettings.syncAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "true"); + for (boolean metricsEnabled : List.of(true, false)) { + clearInvocations(apmAgentSettings, apmTelemetryProvider.getTracer()); + + Settings update = Settings.builder() + .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), metricsEnabled) + .build(); + apmAgentSettings.initAgentSystemProperties(update); + + verify(apmAgentSettings).setAgentSetting("recording", "true"); + verify(apmAgentSettings).setAgentSetting("instrument", "true"); + clearInvocations(apmAgentSettings); + + Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build(); + triggerUpdateConsumer(initial, update); + verify(apmAgentSettings).setAgentSetting("recording", "true"); + verify(apmAgentSettings).setAgentSetting("instrument", "true"); + verify(apmTelemetryProvider.getTracer()).setEnabled(true); + } } public void testEnableTracingUsingLegacySetting() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_ENABLED_SETTING.getKey(), true).build(); - apmAgentSettings.syncAgentSystemProperties(settings); + Settings settings = Settings.builder().put(TRACING_APM_ENABLED_SETTING.getKey(), true).build(); + apmAgentSettings.initAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "true"); + verify(apmAgentSettings).setAgentSetting("instrument", "true"); + } + + public void testEnableMetrics() { + for (boolean tracingEnabled : List.of(true, false)) { + clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService()); + + Settings 
update = Settings.builder() + .put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true) + .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), tracingEnabled) + .build(); + apmAgentSettings.initAgentSystemProperties(update); + + verify(apmAgentSettings).setAgentSetting("recording", "true"); + verify(apmAgentSettings).setAgentSetting("instrument", Boolean.toString(tracingEnabled)); + clearInvocations(apmAgentSettings); + + Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build(); + triggerUpdateConsumer(initial, update); + verify(apmAgentSettings).setAgentSetting("recording", "true"); + verify(apmTelemetryProvider.getMeterService()).setEnabled(true); + } } /** - * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false. + * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false unless metrics are enabled. */ public void testDisableTracing() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build(); - apmAgentSettings.syncAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "false"); + for (boolean metricsEnabled : List.of(true, false)) { + clearInvocations(apmAgentSettings, apmTelemetryProvider.getTracer()); + + Settings update = Settings.builder() + .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false) + .put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), metricsEnabled) + .build(); + apmAgentSettings.initAgentSystemProperties(update); + + verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled)); + verify(apmAgentSettings).setAgentSetting("instrument", "false"); + clearInvocations(apmAgentSettings); + + Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); + 
triggerUpdateConsumer(initial, update); + verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled)); + verify(apmAgentSettings).setAgentSetting("instrument", "false"); + verify(apmTelemetryProvider.getTracer()).setEnabled(false); + } } public void testDisableTracingUsingLegacySetting() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_ENABLED_SETTING.getKey(), false).build(); - apmAgentSettings.syncAgentSystemProperties(settings); + Settings settings = Settings.builder().put(TRACING_APM_ENABLED_SETTING.getKey(), false).build(); + apmAgentSettings.initAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "false"); + verify(apmAgentSettings).setAgentSetting("instrument", "false"); + } + + public void testDisableMetrics() { + for (boolean tracingEnabled : List.of(true, false)) { + clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService()); + + Settings update = Settings.builder() + .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), tracingEnabled) + .put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false) + .build(); + apmAgentSettings.initAgentSystemProperties(update); + + verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(tracingEnabled)); + verify(apmAgentSettings).setAgentSetting("instrument", Boolean.toString(tracingEnabled)); + clearInvocations(apmAgentSettings); + + Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + triggerUpdateConsumer(initial, update); + verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(tracingEnabled)); + verify(apmTelemetryProvider.getMeterService()).setEnabled(false); + } + } + + private void triggerUpdateConsumer(Settings initial, Settings update) { + ClusterService clusterService = mock(); + ClusterSettings clusterSettings = new ClusterSettings( + initial, + Set.of( + 
TELEMETRY_TRACING_ENABLED_SETTING, + TELEMETRY_METRICS_ENABLED_SETTING, + TELEMETRY_TRACING_NAMES_INCLUDE_SETTING, + TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING, + TELEMETRY_TRACING_SANITIZE_FIELD_NAMES, + APM_AGENT_SETTINGS + ) + ); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + apmAgentSettings.addClusterSettingsListeners(clusterService, apmTelemetryProvider); + clusterSettings.applySettings(update); } /** * Check that when cluster settings are synchronised with the system properties, agent settings are set. */ public void testSetAgentSettings() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); Settings settings = Settings.builder() - .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) - .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true") + .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .put(APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true") .build(); - apmAgentSettings.syncAgentSystemProperties(settings); + apmAgentSettings.initAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "true"); verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); } public void testSetAgentsSettingsWithLegacyPrefix() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); Settings settings = Settings.builder() - .put(APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) + .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) .put("tracing.apm.agent.span_compression_enabled", "true") .build(); - apmAgentSettings.syncAgentSystemProperties(settings); + apmAgentSettings.initAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "true"); verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); @@ -92,57 +201,54 @@ public void testSetAgentsSettingsWithLegacyPrefix() { * Check that invalid or forbidden APM agent settings are rejected. 
*/ public void testRejectForbiddenOrUnknownAgentSettings() { - List prefixes = List.of(APMAgentSettings.APM_AGENT_SETTINGS.getKey(), "tracing.apm.agent."); + List prefixes = List.of(APM_AGENT_SETTINGS.getKey(), "tracing.apm.agent."); for (String prefix : prefixes) { Settings settings = Settings.builder().put(prefix + "unknown", "true").build(); - Exception exception = expectThrows( - IllegalArgumentException.class, - () -> APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings) - ); + Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings)); assertThat(exception.getMessage(), containsString("[" + prefix + "unknown]")); } // though, accept / ignore nested global_labels for (String prefix : prefixes) { Settings settings = Settings.builder().put(prefix + "global_labels." + randomAlphaOfLength(5), "123").build(); - APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings); + APM_AGENT_SETTINGS.getAsMap(settings); } } public void testTelemetryTracingNamesIncludeFallback() { - Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING.getKey(), "abc,xyz").build(); + Settings settings = Settings.builder().put(TRACING_APM_NAMES_INCLUDE_SETTING.getKey(), "abc,xyz").build(); - List included = APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.get(settings); + List included = TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.get(settings); assertThat(included, containsInAnyOrder("abc", "xyz")); } public void testTelemetryTracingNamesExcludeFallback() { - Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING.getKey(), "abc,xyz").build(); + Settings settings = Settings.builder().put(TRACING_APM_NAMES_EXCLUDE_SETTING.getKey(), "abc,xyz").build(); - List included = APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.get(settings); + List included = TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.get(settings); assertThat(included, containsInAnyOrder("abc", 
"xyz")); } public void testTelemetryTracingSanitizeFieldNamesFallback() { - Settings settings = Settings.builder().put(APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES.getKey(), "abc,xyz").build(); + Settings settings = Settings.builder().put(TRACING_APM_SANITIZE_FIELD_NAMES.getKey(), "abc,xyz").build(); - List included = APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(settings); + List included = TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(settings); assertThat(included, containsInAnyOrder("abc", "xyz")); } public void testTelemetryTracingSanitizeFieldNamesFallbackDefault() { - List included = APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(Settings.EMPTY); + List included = TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(Settings.EMPTY); assertThat(included, hasItem("password")); // and more defaults } public void testTelemetrySecretTokenFallback() { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING.getKey(), "verysecret"); + secureSettings.setString(TRACING_APM_SECRET_TOKEN_SETTING.getKey(), "verysecret"); Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - try (SecureString secureString = APMAgentSettings.TELEMETRY_SECRET_TOKEN_SETTING.get(settings)) { + try (SecureString secureString = TELEMETRY_SECRET_TOKEN_SETTING.get(settings)) { assertEquals("verysecret", secureString.toString()); } @@ -150,12 +256,22 @@ public void testTelemetrySecretTokenFallback() { public void testTelemetryApiKeyFallback() { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(APMAgentSettings.TRACING_APM_API_KEY_SETTING.getKey(), "abc"); + secureSettings.setString(TRACING_APM_API_KEY_SETTING.getKey(), "abc"); Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - try (SecureString secureString = APMAgentSettings.TELEMETRY_API_KEY_SETTING.get(settings)) { + try (SecureString 
secureString = TELEMETRY_API_KEY_SETTING.get(settings)) { assertEquals("abc", secureString.toString()); } } + + /** + * Check that invalid or forbidden APM agent settings are rejected if their last part resembles an allowed setting. + */ + public void testRejectUnknownSettingResemblingAnAllowedOne() { + Settings settings = Settings.builder().put(APM_AGENT_SETTINGS.getKey() + "unknown.service_name", "true").build(); + + Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings)); + assertThat(exception.getMessage(), containsString("[telemetry.agent.unknown.service_name]")); + } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml index 6f12087de7d5e..c47dacacde3d8 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -99,8 +99,8 @@ teardown: --- "Test Drop Processor with Upsert (_bulk)": - skip: - version: ' - 8.12.99' - reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.13.0' + version: ' - 8.12.0' + reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: id: "my_pipeline" @@ -140,8 +140,8 @@ teardown: --- "Test Drop Processor with Upsert (_update)": - skip: - version: ' - 8.12.99' - reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.13.0' + version: ' - 8.12.0' + reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: id: "my_pipeline" diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml index 
341adaa781ef0..0bf623e8ff263 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml @@ -77,8 +77,8 @@ teardown: --- "Test Fail Processor with Upsert (bulk)": - skip: - version: ' - 8.12.99' - reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.13.0' + version: ' - 8.12.0' + reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: id: "my_pipeline" diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java index c7ad2f2ea4bb5..ac38b87e93ad0 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java @@ -110,6 +110,8 @@ public static RatedRequest createTestItem(boolean forceRequest) { } public void testXContentRoundtrip() throws IOException { + assumeFalse("https://github.com/elastic/elasticsearch/issues/104570", Constants.WINDOWS); + RatedRequest testItem = createTestItem(randomBoolean()); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); @@ -302,6 +304,8 @@ public void testProfileNotAllowed() { * matter for parsing xContent */ public void testParseFromXContent() throws IOException { + assumeFalse("https://github.com/elastic/elasticsearch/issues/104570", Constants.WINDOWS); + String querySpecString = """ { "id": "my_qa_query", diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java 
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index 46271f8c61e9c..071031d2ffd19 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.client.documentation; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.client.internal.Client; @@ -259,7 +260,7 @@ public void onFailure(Exception e) { /** * Similar to what CancelTests does: blocks some operations to be able to catch some tasks in running state - * @see CancelTests#testCancel(String, AbstractBulkByScrollRequestBuilder, CancelTests.CancelAssertion, Matcher) + * @see CancelTests#testCancel(ActionType, AbstractBulkByScrollRequestBuilder, CancelTests.CancelAssertion, Matcher) */ private ReindexRequestBuilder reindexAndPartiallyBlock() throws Exception { final Client client = client(); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java index b211f7d92f51f..a2911090ab931 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import 
org.elasticsearch.action.ingest.DeletePipelineRequest; @@ -21,6 +22,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine.Operation.Origin; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequestBuilder; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.BulkByScrollTask; @@ -80,15 +82,17 @@ public void clearAllowedOperations() { * Executes the cancellation test */ private void testCancel( - String action, + ActionType action, AbstractBulkByScrollRequestBuilder builder, CancelAssertion assertion, Matcher taskDescriptionMatcher ) throws Exception { createIndex(INDEX); - + // Scroll by 1 so that cancellation is easier to control + builder.source().setSize(1); + AbstractBulkByScrollRequest request = builder.request(); // Total number of documents created for this test (~10 per primary shard per slice) - int numDocs = getNumShards(INDEX).numPrimaries * 10 * builder.request().getSlices(); + int numDocs = getNumShards(INDEX).numPrimaries * 10 * request.getSlices(); ALLOWED_OPERATIONS.release(numDocs); logger.debug("setting up [{}] docs", numDocs); @@ -105,18 +109,15 @@ private void testCancel( assertHitCount(prepareSearch(INDEX).setSize(0), numDocs); assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0)); - // Scroll by 1 so that cancellation is easier to control - builder.source().setSize(1); - /* Allow a random number of the documents less the number of workers * to be modified by the reindex action. That way at least one worker * is blocked. 
*/ - int numModifiedDocs = randomIntBetween(builder.request().getSlices() * 2, numDocs); + int numModifiedDocs = randomIntBetween(request.getSlices() * 2, numDocs); logger.debug("chose to modify [{}] out of [{}] docs", numModifiedDocs, numDocs); - ALLOWED_OPERATIONS.release(numModifiedDocs - builder.request().getSlices()); + ALLOWED_OPERATIONS.release(numModifiedDocs - request.getSlices()); // Now execute the reindex action... - ActionFuture future = builder.execute(); + ActionFuture future = client().execute(action, request); /* ... and wait for the indexing operation listeners to block. It * is important to realize that some of the workers might have @@ -130,7 +131,7 @@ private void testCancel( ); // 10 seconds is usually fine but on heavily loaded machines this can take a while // Status should show the task running - TaskInfo mainTask = findTaskToCancel(action, builder.request().getSlices()); + TaskInfo mainTask = findTaskToCancel(action.name(), request.getSlices()); BulkByScrollTask.Status status = (BulkByScrollTask.Status) mainTask.status(); assertNull(status.getReasonCancelled()); @@ -150,7 +151,7 @@ private void testCancel( logger.debug("asserting that parent is marked canceled {}", status); assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled()); - if (builder.request().getSlices() > 1) { + if (request.getSlices() > 1) { boolean foundCancelled = false; ListTasksResponse sliceList = clusterAdmin().prepareListTasks() .setTargetParentTaskId(mainTask.taskId()) @@ -168,11 +169,11 @@ private void testCancel( } logger.debug("unblocking the blocked update"); - ALLOWED_OPERATIONS.release(builder.request().getSlices()); + ALLOWED_OPERATIONS.release(request.getSlices()); // Checks that no more operations are executed assertBusy(() -> { - if (builder.request().getSlices() == 1) { + if (request.getSlices() == 1) { /* We can only be sure that we've drained all the permits if we only use a single worker. 
Otherwise some worker may have * exhausted all of its documents before we blocked. */ assertEquals(0, ALLOWED_OPERATIONS.availablePermits()); @@ -191,7 +192,7 @@ private void testCancel( String tasks = clusterAdmin().prepareListTasks().setTargetParentTaskId(mainTask.taskId()).setDetailed(true).get().toString(); throw new RuntimeException("Exception while waiting for the response. Running tasks: " + tasks, e); } finally { - if (builder.request().getSlices() >= 1) { + if (request.getSlices() >= 1) { // If we have more than one worker we might not have made all the modifications numModifiedDocs -= ALLOWED_OPERATIONS.availablePermits(); } @@ -221,7 +222,7 @@ public static TaskInfo findTaskToCancel(String actionName, int workerCount) { } public void testReindexCancel() throws Exception { - testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest"), (response, total, modified) -> { + testCancel(ReindexAction.INSTANCE, reindex().source(INDEX).destination("dest"), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request"))); refresh("dest"); @@ -239,17 +240,22 @@ public void testUpdateByQueryCancel() throws Exception { }"""); assertAcked(clusterAdmin().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); - testCancel(UpdateByQueryAction.NAME, updateByQuery().setPipeline("set-processed").source(INDEX), (response, total, modified) -> { - assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request"))); - assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); - }, equalTo("update-by-query [" + INDEX + "]")); + testCancel( + UpdateByQueryAction.INSTANCE, + updateByQuery().setPipeline("set-processed").source(INDEX), + (response, total, modified) -> { + assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request"))); + 
assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); + }, + equalTo("update-by-query [" + INDEX + "]") + ); assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("set-processed")).get()); } public void testDeleteByQueryCancel() throws Exception { testCancel( - DeleteByQueryAction.NAME, + DeleteByQueryAction.INSTANCE, deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request"))); @@ -261,7 +267,7 @@ public void testDeleteByQueryCancel() throws Exception { public void testReindexCancelWithWorkers() throws Exception { testCancel( - ReindexAction.NAME, + ReindexAction.INSTANCE, reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest").setSlices(5), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); @@ -283,7 +289,7 @@ public void testUpdateByQueryCancelWithWorkers() throws Exception { assertAcked(clusterAdmin().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); testCancel( - UpdateByQueryAction.NAME, + UpdateByQueryAction.INSTANCE, updateByQuery().setPipeline("set-processed").source(INDEX).setSlices(5), (response, total, modified) -> { assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); @@ -297,7 +303,7 @@ public void testUpdateByQueryCancelWithWorkers() throws Exception { public void testDeleteByQueryCancelWithWorkers() throws Exception { testCancel( - DeleteByQueryAction.NAME, + DeleteByQueryAction.INSTANCE, deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()).setSlices(5), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); diff --git 
a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java index 855cb1863f399..24753c2b9ae6a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java @@ -39,7 +39,7 @@ public void testDeprecatedSort() { int subsetSize = randomIntBetween(1, max - 1); ReindexRequestBuilder copy = new ReindexRequestBuilder(client()).source("source").destination("dest").refresh(true); copy.maxDocs(subsetSize); - copy.request().addSortField("foo", SortOrder.DESC); + copy.source().addSort("foo", SortOrder.DESC); assertThat(copy.get(), matcher().created(subsetSize)); assertHitCount(client().prepareSearch("dest").setSize(0), subsetSize); diff --git a/qa/apm/docker-compose.yml b/qa/apm/docker-compose.yml index b107788b2fb36..a3969479d0914 100644 --- a/qa/apm/docker-compose.yml +++ b/qa/apm/docker-compose.yml @@ -56,13 +56,13 @@ services: - xpack.security.authc.token.enabled=true - xpack.security.enabled=true # APM specific settings. 
We don't configure `secret_key` because Kibana is configured with a blank key - - tracing.apm.enabled=true - - tracing.apm.agent.server_url=http://apmserver:8200 + - telemetry.tracing.enabled=true + - telemetry.agent.server_url=http://apmserver:8200 # Send traces to APM server aggressively - - tracing.apm.agent.metrics_interval=1s + - telemetry.agent.metrics_interval=1s # Record everything - - tracing.apm.agent.transaction_sample_rate=1 - - tracing.apm.agent.log_level=debug + - telemetry.agent.transaction_sample_rate=1 + - telemetry.agent.log_level=debug healthcheck: interval: 20s retries: 10 diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index e3796683d1d32..28d372671ee99 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -45,10 +45,16 @@ excludeList.add('aggregations/filters_bucket/cache hits') // Validation (and associated tests) are supposed to be skipped/have // different behaviour for versions before and after 8.10 but mixed // cluster tests may not respect that - see the comment above. 
-excludeList.add('cluster.desired_nodes/10_basic/Test settings are validated') -excludeList.add('cluster.desired_nodes/10_basic/Test unknown settings are forbidden in known versions') -excludeList.add('cluster.desired_nodes/10_basic/Test unknown settings are allowed in future versions') -excludeList.add('cluster.desired_nodes/10_basic/Test some settings can be overridden') +// Same for node version, which has been deprecated (and made optional) +// starting from 8.13 +excludeList.add('cluster.desired_nodes/11_old_format/Test settings are validated') +excludeList.add('cluster.desired_nodes/11_old_format/Test unknown settings are forbidden in known versions') +excludeList.add('cluster.desired_nodes/11_old_format/Test unknown settings are allowed in future versions') +excludeList.add('cluster.desired_nodes/11_old_format/Test some settings can be overridden') +excludeList.add('cluster.desired_nodes/11_old_format/Test node version must be at least the current master version') +excludeList.add('cluster.desired_nodes/11_old_format/Test node version is required') +excludeList.add('cluster.desired_nodes/11_old_format/Test node version must have content') +excludeList.add('cluster.desired_nodes/11_old_format/Test node version can not be null') excludeList.add('cluster.desired_nodes/20_dry_run/Test validation works for dry run updates') BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json new file mode 100644 index 0000000000000..511e925a12e1d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json @@ -0,0 +1,28 @@ +{ + "connector_secret.delete": { + "documentation": { + "url": null, + "description": "Deletes a connector secret." 
+ }, + "stability": "experimental", + "visibility":"private", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_connector/_secret/{id}", + "methods":[ "DELETE" ], + "parts":{ + "id":{ + "type":"string", + "description":"The ID of the secret" + } + } + } + ] + }, + "params":{} + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index dd71b82c106a8..5435389452a51 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -41,13 +41,13 @@ import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorBase; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -280,7 +280,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); assertResponse(client().search(searchRequest), searchResponse -> { 
assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(1, longTerms.getBuckets().size()); }); @@ -296,7 +296,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { ); assertResponse(client().search(searchRequest), searchResponse -> { assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(2, longTerms.getBuckets().size()); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 9661f4ebb966d..cf8decc5655ec 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -984,7 +984,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException, List tags = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { tags.add("tag_" + i); - builders.add(prepareIndex("test").setId(i + "").setSource("tags", tags)); + builders.add(prepareIndex("test").setId(i + "").setSource("tags", List.copyOf(tags))); } indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java index a856ee36aadc2..5144aee654b31 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java @@ -47,7 +47,7 @@ public void testScroll() { assertNoFailures(response); if (respNum == 1) { // initial response. - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); Terms terms = aggregations.get("f"); assertEquals(Math.min(numDocs, 3L), terms.getBucketByKey("0").getDocCount()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index fc0a93ad3d290..5b8c238d7b7db 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -66,7 +66,7 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t prepareSearch("idx").addAggregation(missing("missing_values").field("value")) .addAggregation(terms("values").field("value").collectMode(aggCollectionMode)), response -> { - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); Missing missing = aggs.get("missing_values"); assertNotNull(missing); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java index f22e0a2931634..3634005d37ba4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java @@ -43,7 +43,7 @@ public void testMetadataSetOnAggregationResult() throws Exception { terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value")) 
).addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata)), response -> { - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); assertNotNull(aggs); Terms terms = aggs.get("the_terms"); @@ -52,7 +52,7 @@ public void testMetadataSetOnAggregationResult() throws Exception { List buckets = terms.getBuckets(); for (Terms.Bucket bucket : buckets) { - Aggregations subAggs = bucket.getAggregations(); + InternalAggregations subAggs = bucket.getAggregations(); assertNotNull(subAggs); Sum sum = subAggs.get("the_sum"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index da1376a300728..21a607f113f14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -19,9 +19,9 @@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory; @@ -136,7 +136,7 @@ public void testXContentResponse() throws Exception { StringTerms classes = 
response.getAggregations().get("class"); assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); + Map aggs = classBucket.getAggregations().asMap(); assertTrue(aggs.containsKey("sig_terms")); SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); assertThat(agg.getBuckets().size(), equalTo(1)); @@ -331,7 +331,7 @@ public void testBackgroundVsSeparateSet( double score10Background = sigTerms1.getBucketByKey("0").getSignificanceScore(); double score11Background = sigTerms1.getBucketByKey("1").getSignificanceScore(); - Aggregations aggs = response2.getAggregations(); + InternalAggregations aggs = response2.getAggregations(); sigTerms0 = (SignificantTerms) ((InternalFilter) aggs.get("0")).getAggregations().getAsMap().get("sig_terms"); double score00SeparateSets = sigTerms0.getBucketByKey("0").getSignificanceScore(); @@ -386,7 +386,7 @@ public void testScoresEqualForPositiveAndNegative(SignificanceHeuristic heuristi assertThat(classes.getBuckets().size(), equalTo(2)); Iterator classBuckets = classes.getBuckets().iterator(); - Aggregations aggregations = classBuckets.next().getAggregations(); + InternalAggregations aggregations = classBuckets.next().getAggregations(); SignificantTerms sigTerms = aggregations.get("mySignificantTerms"); List classA = sigTerms.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index d40264d9facf0..02c45c4aade1b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -19,8 +19,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import 
org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; @@ -1037,7 +1037,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { for (Bucket b : buckets) { assertThat(b, notNullValue()); assertThat(b.getDocCount(), equalTo(1L)); - Aggregations subAggs = b.getAggregations(); + InternalAggregations subAggs = b.getAggregations(); assertThat(subAggs, notNullValue()); assertThat(subAggs.asList().size(), equalTo(1)); Aggregation subAgg = subAggs.get("scripted"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java index 7509cf3815085..3c9fbca476c0d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java @@ -19,7 +19,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.Sum; import 
org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -44,7 +44,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @ESIntegTestCase.SuiteScopeTestCase -abstract class BucketMetricsPipeLineAggregationTestCase extends ESIntegTestCase { +abstract class BucketMetricsPipeLineAggregationTestCase extends ESIntegTestCase { static final String SINGLE_VALUED_FIELD_NAME = "l_value"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 6562c485b9204..421a5d2d36254 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsNull.notNullValue; -public class ExtendedStatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { +public class ExtendedStatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { @Override protected ExtendedStatsBucketPipelineAggregationBuilder BucketMetricsPipelineAgg(String name, String bucketsPath) { @@ -43,7 +43,7 @@ protected void assertResult( IntToDoubleFunction buckets, Function bucketKeys, int numBuckets, - ExtendedStatsBucket pipelineBucket + InternalExtendedStatsBucket pipelineBucket ) { double sum = 0; int count = 0; @@ -71,7 +71,7 @@ protected String nestedMetric() { } @Override - protected double getNestedMetric(ExtendedStatsBucket bucket) { + protected double getNestedMetric(InternalExtendedStatsBucket bucket) { return bucket.getAvg(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index c05390bac40ae..b4193b8f90e1f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.core.IsNull.notNullValue; -public class PercentilesBucketIT extends BucketMetricsPipeLineAggregationTestCase { +public class PercentilesBucketIT extends BucketMetricsPipeLineAggregationTestCase { private static final double[] PERCENTS = { 0.0, 1.0, 25.0, 50.0, 75.0, 99.0, 100.0 }; @@ -46,7 +46,7 @@ protected void assertResult( IntToDoubleFunction bucketValues, Function bucketKeys, int numBuckets, - PercentilesBucket pipelineBucket + InternalPercentilesBucket pipelineBucket ) { double[] values = new double[numBuckets]; for (int i = 0; i < numBuckets; ++i) { @@ -62,7 +62,7 @@ protected String nestedMetric() { } @Override - protected double getNestedMetric(PercentilesBucket bucket) { + protected double getNestedMetric(InternalPercentilesBucket bucket) { return bucket.percentile(50); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java index 7040f3bf115f3..cd87bd98a0926 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java @@ -14,7 +14,7 @@ import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.statsBucket; import static org.hamcrest.Matchers.equalTo; -public class StatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { +public class 
StatsBucketIT extends BucketMetricsPipeLineAggregationTestCase { @Override protected StatsBucketPipelineAggregationBuilder BucketMetricsPipelineAgg(String name, String bucketsPath) { @@ -26,7 +26,7 @@ protected void assertResult( IntToDoubleFunction bucketValues, Function bucketKeys, int numBuckets, - StatsBucket pipelineBucket + InternalStatsBucket pipelineBucket ) { double sum = 0; int count = 0; @@ -52,7 +52,7 @@ protected String nestedMetric() { } @Override - protected double getNestedMetric(StatsBucket bucket) { + protected double getNestedMetric(InternalStatsBucket bucket) { return bucket.getAvg(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java index 37c78ec568332..31524765d4e14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -22,7 +22,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.range.InternalGeoDistance; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.test.ESIntegTestCase; @@ -216,7 +216,7 @@ public void testGeoDistanceAggregation() throws IOException { .addRange(0, 25000) ), response -> { - Aggregations aggregations = response.getAggregations(); + InternalAggregations aggregations = response.getAggregations(); assertNotNull(aggregations); InternalGeoDistance geoDistance = aggregations.get(name); assertNotNull(geoDistance); diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java 
b/server/src/main/java/org/elasticsearch/TransportVersion.java index d3224bb048393..22e02652e9f68 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -101,6 +101,14 @@ public static TransportVersion fromString(String str) { return TransportVersion.fromId(Integer.parseInt(str)); } + /** + * Returns a string representing the Elasticsearch release version of this transport version, + * if applicable for this deployment, otherwise the raw version number. + */ + public String toReleaseVersion() { + return TransportVersions.VERSION_LOOKUP.apply(id); + } + @Override public String toString() { return Integer.toString(id); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d0efb612493fc..5d98451e49100 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -165,6 +165,7 @@ static TransportVersion def(int id) { public static final TransportVersion REQUIRE_DATA_STREAM_ADDED = def(8_578_00_0); public static final TransportVersion ML_INFERENCE_COHERE_EMBEDDINGS_ADDED = def(8_579_00_0); public static final TransportVersion DESIRED_NODE_VERSION_OPTIONAL_STRING = def(8_580_00_0); + public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED = def(8_581_00_0); /* * STOP! READ THIS FIRST! 
No, really, @@ -289,11 +290,7 @@ static Collection getAllVersions() { return VERSION_IDS.values(); } - private static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(TransportVersions.class); - - public static String toReleaseVersion(TransportVersion version) { - return VERSION_LOOKUP.apply(version.id()); - } + static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(TransportVersions.class); // no instance private TransportVersions() {} diff --git a/server/src/main/java/org/elasticsearch/action/ActionRequestLazyBuilder.java b/server/src/main/java/org/elasticsearch/action/ActionRequestLazyBuilder.java new file mode 100644 index 0000000000000..7779b71c46717 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/ActionRequestLazyBuilder.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.core.TimeValue; + +import java.util.Objects; + +/** + * This class is similar to ActionRequestBuilder, except that it does not build the request until the request() method is called. + * @param + * @param + */ +public abstract class ActionRequestLazyBuilder + implements + RequestBuilder { + + protected final ActionType action; + protected final ElasticsearchClient client; + + protected ActionRequestLazyBuilder(ElasticsearchClient client, ActionType action) { + Objects.requireNonNull(action, "action must not be null"); + this.action = action; + this.client = client; + } + + /** + * This method creates the request. 
The caller of this method is responsible for calling Request#decRef. + * @return A newly-built Request, fully initialized by this builder. + */ + public abstract Request request(); + + public ActionFuture execute() { + return client.execute(action, request()); + } + + /** + * Short version of execute().actionGet(). + */ + public Response get() { + return execute().actionGet(); + } + + /** + * Short version of execute().actionGet(). + */ + public Response get(TimeValue timeout) { + return execute().actionGet(timeout); + } + + public void execute(ActionListener listener) { + client.execute(action, request(), listener); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java index 2b961b6bc7351..16e5430063650 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java @@ -8,12 +8,17 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestLazyBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.RequestBuilder; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteRequestBuilder; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -23,26 +28,50 @@ import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.xcontent.XContentType; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + /** * A bulk request holds an ordered {@link IndexRequest}s and {@link DeleteRequest}s and allows to executes * it in a single batch. */ -public class BulkRequestBuilder extends ActionRequestBuilder implements WriteRequestBuilder { +public class BulkRequestBuilder extends ActionRequestLazyBuilder + implements + WriteRequestBuilder { + private final String globalIndex; + /* + * The following 3 variables hold the list of requests that make up this bulk. Only one can be non-empty. That is, users can't add + * some IndexRequests and some IndexRequestBuilders. They need to pick one (preferably builders) and stick with it. + */ + private final List> requests = new ArrayList<>(); + private final List framedData = new ArrayList<>(); + private final List> requestBuilders = new ArrayList<>(); + private ActiveShardCount waitForActiveShards; + private TimeValue timeout; + private String timeoutString; + private String globalPipeline; + private String globalRouting; + private WriteRequest.RefreshPolicy refreshPolicy; + private String refreshPolicyString; public BulkRequestBuilder(ElasticsearchClient client, @Nullable String globalIndex) { - super(client, BulkAction.INSTANCE, new BulkRequest(globalIndex)); + super(client, BulkAction.INSTANCE); + this.globalIndex = globalIndex; } public BulkRequestBuilder(ElasticsearchClient client) { - super(client, BulkAction.INSTANCE, new BulkRequest()); + this(client, null); } /** * Adds an {@link IndexRequest} to the list of actions to execute. Follows the same behavior of {@link IndexRequest} * (for example, if no id is provided, one will be generated, or usage of the create flag). 
+ * @deprecated use {@link #add(IndexRequestBuilder)} instead */ + @Deprecated public BulkRequestBuilder add(IndexRequest request) { - super.request.add(request); + requests.add(request); return this; } @@ -51,15 +80,17 @@ public BulkRequestBuilder add(IndexRequest request) { * (for example, if no id is provided, one will be generated, or usage of the create flag). */ public BulkRequestBuilder add(IndexRequestBuilder request) { - super.request.add(request.request()); + requestBuilders.add(request); return this; } /** * Adds an {@link DeleteRequest} to the list of actions to execute. + * @deprecated use {@link #add(DeleteRequestBuilder)} instead */ + @Deprecated public BulkRequestBuilder add(DeleteRequest request) { - super.request.add(request); + requests.add(request); return this; } @@ -67,15 +98,17 @@ public BulkRequestBuilder add(DeleteRequest request) { * Adds an {@link DeleteRequest} to the list of actions to execute. */ public BulkRequestBuilder add(DeleteRequestBuilder request) { - super.request.add(request.request()); + requestBuilders.add(request); return this; } /** * Adds an {@link UpdateRequest} to the list of actions to execute. + * @deprecated use {@link #add(UpdateRequestBuilder)} instead */ + @Deprecated public BulkRequestBuilder add(UpdateRequest request) { - super.request.add(request); + requests.add(request); return this; } @@ -83,7 +116,7 @@ public BulkRequestBuilder add(UpdateRequest request) { * Adds an {@link UpdateRequest} to the list of actions to execute. 
*/ public BulkRequestBuilder add(UpdateRequestBuilder request) { - super.request.add(request.request()); + requestBuilders.add(request); return this; } @@ -91,7 +124,7 @@ public BulkRequestBuilder add(UpdateRequestBuilder request) { * Adds a framed data in binary format */ public BulkRequestBuilder add(byte[] data, int from, int length, XContentType xContentType) throws Exception { - request.add(data, from, length, null, xContentType); + framedData.add(new FramedData(data, from, length, null, xContentType)); return this; } @@ -100,7 +133,7 @@ public BulkRequestBuilder add(byte[] data, int from, int length, XContentType xC */ public BulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, XContentType xContentType) throws Exception { - request.add(data, from, length, defaultIndex, xContentType); + framedData.add(new FramedData(data, from, length, defaultIndex, xContentType)); return this; } @@ -109,7 +142,7 @@ public BulkRequestBuilder add(byte[] data, int from, int length, @Nullable Strin * See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details. */ public BulkRequestBuilder setWaitForActiveShards(ActiveShardCount waitForActiveShards) { - request.waitForActiveShards(waitForActiveShards); + this.waitForActiveShards = waitForActiveShards; return this; } @@ -126,7 +159,7 @@ public BulkRequestBuilder setWaitForActiveShards(final int waitForActiveShards) * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. */ public final BulkRequestBuilder setTimeout(TimeValue timeout) { - request.timeout(timeout); + this.timeout = timeout; return this; } @@ -134,7 +167,7 @@ public final BulkRequestBuilder setTimeout(TimeValue timeout) { * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. 
*/ public final BulkRequestBuilder setTimeout(String timeout) { - request.timeout(timeout); + this.timeoutString = timeout; return this; } @@ -142,16 +175,96 @@ public final BulkRequestBuilder setTimeout(String timeout) { * The number of actions currently in the bulk. */ public int numberOfActions() { - return request.numberOfActions(); + return requests.size() + requestBuilders.size() + framedData.size(); } public BulkRequestBuilder pipeline(String globalPipeline) { - request.pipeline(globalPipeline); + this.globalPipeline = globalPipeline; return this; } public BulkRequestBuilder routing(String globalRouting) { - request.routing(globalRouting); + this.globalRouting = globalRouting; + return this; + } + + @Override + public BulkRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; + return this; + } + + @Override + public BulkRequestBuilder setRefreshPolicy(String refreshPolicy) { + this.refreshPolicyString = refreshPolicy; return this; } + + @Override + public BulkRequest request() { + validate(); + BulkRequest request = new BulkRequest(globalIndex); + for (RequestBuilder requestBuilder : requestBuilders) { + ActionRequest childRequest = requestBuilder.request(); + request.add((DocWriteRequest) childRequest); + } + for (DocWriteRequest childRequest : requests) { + request.add(childRequest); + } + for (FramedData framedData : framedData) { + try { + request.add(framedData.data, framedData.from, framedData.length, framedData.defaultIndex, framedData.xContentType); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + if (waitForActiveShards != null) { + request.waitForActiveShards(waitForActiveShards); + } + if (timeout != null) { + request.timeout(timeout); + } + if (timeoutString != null) { + request.timeout(timeoutString); + } + if (globalPipeline != null) { + request.pipeline(globalPipeline); + } + if (globalRouting != null) { + request.routing(globalRouting); + } + if (refreshPolicy 
!= null) { + request.setRefreshPolicy(refreshPolicy); + } + if (refreshPolicyString != null) { + request.setRefreshPolicy(refreshPolicyString); + } + return request; + } + + private void validate() { + if (countNonEmptyLists(requestBuilders, requests, framedData) > 1) { + throw new IllegalStateException( + "Must use only request builders, requests, or byte arrays within a single bulk request. Cannot mix and match" + ); + } + if (timeout != null && timeoutString != null) { + throw new IllegalStateException("Must use only one setTimeout method"); + } + if (refreshPolicy != null && refreshPolicyString != null) { + throw new IllegalStateException("Must use only one setRefreshPolicy method"); + } + } + + private int countNonEmptyLists(List... lists) { + int sum = 0; + for (List list : lists) { + if (list.isEmpty() == false) { + sum++; + } + } + return sum; + } + + private record FramedData(byte[] data, int from, int length, @Nullable String defaultIndex, XContentType xContentType) {} } diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 0cc1d51c4d97e..5eab04663e959 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -8,17 +8,23 @@ package org.elasticsearch.action.get; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.UnavailableShardsException; import 
org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.OperationRouting; @@ -27,15 +33,17 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -184,8 +192,8 @@ private void asyncGet(GetRequest request, ShardId shardId, ActionListener listener) throws IOException { ShardId shardId = indexShard.shardId(); - var node = getCurrentNodeOfPrimary(clusterService.state(), shardId); if (request.refresh()) { + var node = getCurrentNodeOfPrimary(clusterService.state(), shardId); logger.trace("send refresh action for shard {} to node {}", shardId, node.getId()); var refreshRequest = new 
BasicReplicationRequest(shardId); refreshRequest.setParentTask(request.getParentTask()); @@ -194,44 +202,97 @@ private void handleGetOnUnpromotableShard(GetRequest request, IndexShard indexSh refreshRequest, listener.delegateFailureAndWrap((l, replicationResponse) -> super.asyncShardOperation(request, shardId, l)) ); - } else if (request.realtime()) { - TransportGetFromTranslogAction.Request getFromTranslogRequest = new TransportGetFromTranslogAction.Request(request, shardId); - getFromTranslogRequest.setParentTask(request.getParentTask()); - transportService.sendRequest( - node, - TransportGetFromTranslogAction.NAME, - getFromTranslogRequest, - new ActionListenerResponseHandler<>(listener.delegateFailure((l, r) -> { - if (r.getResult() != null) { - logger.debug("received result for real-time get for id '{}' from promotable shard", request.id()); - l.onResponse(new GetResponse(r.getResult())); - } else { - logger.debug( - "no result for real-time get for id '{}' from promotable shard (segment generation to wait for: {})", - request.id(), - r.segmentGeneration() - ); - if (r.segmentGeneration() == -1) { - // Nothing to wait for (no previous unsafe generation), just handle the Get locally. 
- ActionRunnable.supply(l, () -> shardOperation(request, shardId)).run(); - } else { - assert r.segmentGeneration() > -1L; - assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; - indexShard.waitForPrimaryTermAndGeneration( - r.primaryTerm(), - r.segmentGeneration(), - listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)) - ); - } - } - }), TransportGetFromTranslogAction.Response::new, getExecutor(request, shardId)) + return; + } + if (request.realtime()) { + final var state = clusterService.state(); + final var observer = new ClusterStateObserver( + state, + clusterService, + TimeValue.timeValueSeconds(60), + logger, + threadPool.getThreadContext() ); + getFromTranslog(request, indexShard, state, observer, listener); } else { // A non-real-time get with no explicit refresh requested. super.asyncShardOperation(request, shardId, listener); } } + private void getFromTranslog( + GetRequest request, + IndexShard indexShard, + ClusterState state, + ClusterStateObserver observer, + ActionListener listener + ) { + tryGetFromTranslog(request, indexShard, state, listener.delegateResponse((l, e) -> { + final var cause = ExceptionsHelper.unwrapCause(e); + logger.debug("get_from_translog failed", cause); + if (cause instanceof ShardNotFoundException + || cause instanceof IndexNotFoundException + || cause instanceof NoShardAvailableActionException + || cause instanceof UnavailableShardsException) { + logger.debug("retrying get_from_translog"); + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + getFromTranslog(request, indexShard, state, observer, l); + } + + @Override + public void onClusterServiceClose() { + l.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + l.onFailure(new ElasticsearchException("Timed out retrying get_from_translog", cause)); + } + }); + } else { + 
l.onFailure(e); + } + })); + } + + private void tryGetFromTranslog(GetRequest request, IndexShard indexShard, ClusterState state, ActionListener listener) { + ShardId shardId = indexShard.shardId(); + var node = getCurrentNodeOfPrimary(state, shardId); + TransportGetFromTranslogAction.Request getFromTranslogRequest = new TransportGetFromTranslogAction.Request(request, shardId); + getFromTranslogRequest.setParentTask(request.getParentTask()); + transportService.sendRequest( + node, + TransportGetFromTranslogAction.NAME, + getFromTranslogRequest, + new ActionListenerResponseHandler<>(listener.delegateFailure((l, r) -> { + if (r.getResult() != null) { + logger.debug("received result for real-time get for id '{}' from promotable shard", request.id()); + l.onResponse(new GetResponse(r.getResult())); + } else { + logger.debug( + "no result for real-time get for id '{}' from promotable shard (segment generation to wait for: {})", + request.id(), + r.segmentGeneration() + ); + if (r.segmentGeneration() == -1) { + // Nothing to wait for (no previous unsafe generation), just handle the Get locally. 
+ ActionRunnable.supply(l, () -> shardOperation(request, shardId)).run(); + } else { + assert r.segmentGeneration() > -1L; + assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; + indexShard.waitForPrimaryTermAndGeneration( + r.primaryTerm(), + r.segmentGeneration(), + listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)) + ); + } + } + }), TransportGetFromTranslogAction.Response::new, getExecutor(request, shardId)) + ); + } + static DiscoveryNode getCurrentNodeOfPrimary(ClusterState clusterState, ShardId shardId) { var shardRoutingTable = clusterState.routingTable().shardRoutingTable(shardId); if (shardRoutingTable.primaryShard() == null || shardRoutingTable.primaryShard().active() == false) { diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java index 1b180874b433d..cd47531f81599 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java @@ -40,7 +40,6 @@ import java.io.IOException; import java.util.Objects; -// TODO(ES-5727): add a retry mechanism to TransportGetFromTranslogAction public class TransportGetFromTranslogAction extends HandledTransportAction< TransportGetFromTranslogAction.Request, TransportGetFromTranslogAction.Response> { diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index eda28eb4e139e..b1ad328abda92 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -165,7 +165,8 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio isRetry = in.readBoolean(); autoGeneratedTimestamp = in.readLong(); if 
(in.readBoolean()) { - contentType = in.readEnum(XContentType.class); + // faster than StreamInput::readEnum, do not replace we read a lot of these instances at times + contentType = XContentType.ofOrdinal(in.readByte()); } else { contentType = null; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index d3bc2d4d1c9e6..8426ac68df139 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -27,7 +27,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; @@ -69,7 +68,7 @@ public class SearchResponse extends ActionResponse implements ChunkedToXContentO private static final ParseField NUM_REDUCE_PHASES = new ParseField("num_reduce_phases"); private final SearchHits hits; - private final Aggregations aggregations; + private final InternalAggregations aggregations; private final Suggest suggest; private final SearchProfileResults profileResults; private final boolean timedOut; @@ -120,7 +119,7 @@ public SearchResponse(StreamInput in) throws IOException { public SearchResponse( SearchHits hits, - Aggregations aggregations, + InternalAggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly, @@ -185,7 +184,7 @@ public SearchResponse( public SearchResponse( SearchHits hits, - Aggregations aggregations, + InternalAggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly, @@ -257,7 +256,7 @@ public SearchHits getHits() { * Aggregations in this response. 
"empty" aggregations could be * either {@code null} or {@link InternalAggregations#EMPTY}. */ - public @Nullable Aggregations getAggregations() { + public @Nullable InternalAggregations getAggregations() { return aggregations; } @@ -449,7 +448,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); String currentFieldName = parser.currentName(); SearchHits hits = null; - Aggregations aggs = null; + InternalAggregations aggs = null; Suggest suggest = null; SearchProfileResults profile = null; boolean timedOut = false; @@ -485,8 +484,8 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE } else if (token == Token.START_OBJECT) { if (SearchHits.Fields.HITS.equals(currentFieldName)) { hits = SearchHits.fromXContent(parser); - } else if (Aggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { - aggs = Aggregations.fromXContent(parser); + } else if (InternalAggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { + aggs = InternalAggregations.fromXContent(parser); } else if (Suggest.NAME.equals(currentFieldName)) { suggest = Suggest.fromXContent(parser); } else if (SearchProfileResults.PROFILE_FIELD.equals(currentFieldName)) { @@ -550,7 +549,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE public void writeTo(StreamOutput out) throws IOException { assert hasReferences(); hits.writeTo(out); - out.writeOptionalWriteable((InternalAggregations) aggregations); + out.writeOptionalWriteable(aggregations); out.writeOptionalWriteable(suggest); out.writeBoolean(timedOut); out.writeOptionalBoolean(terminatedEarly); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index 9db9d65bc3dac..ae8c749475c5d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java 
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -147,7 +147,7 @@ public SearchResponse getMergedResponse(Clusters clusters) { profileResults.putAll(searchResponse.getProfileResults()); if (searchResponse.hasAggregations()) { - InternalAggregations internalAggs = (InternalAggregations) searchResponse.getAggregations(); + InternalAggregations internalAggs = searchResponse.getAggregations(); aggs.add(internalAggs); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index d52a585b3e792..a3763bf101b15 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -11,7 +11,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; @@ -45,7 +45,7 @@ public class SearchResponseSections implements RefCounted { 1 ); protected final SearchHits hits; - protected final Aggregations aggregations; + protected final InternalAggregations aggregations; protected final Suggest suggest; protected final SearchProfileResults profileResults; protected final boolean timedOut; @@ -56,7 +56,7 @@ public class SearchResponseSections implements RefCounted { public SearchResponseSections( SearchHits hits, - Aggregations aggregations, + InternalAggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly, @@ -91,7 +91,7 @@ public final SearchHits hits() { return hits; } - public final Aggregations 
aggregations() { + public final InternalAggregations aggregations() { return aggregations; } diff --git a/server/src/main/java/org/elasticsearch/action/support/WriteRequest.java b/server/src/main/java/org/elasticsearch/action/support/WriteRequest.java index 0df640e3a50a1..64355a32c3a63 100644 --- a/server/src/main/java/org/elasticsearch/action/support/WriteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/WriteRequest.java @@ -92,8 +92,10 @@ public static RefreshPolicy parse(String value) { throw new IllegalArgumentException("Unknown value for refresh: [" + value + "]."); } + private static final RefreshPolicy[] values = values(); + public static RefreshPolicy readFrom(StreamInput in) throws IOException { - return RefreshPolicy.values()[in.readByte()]; + return values[in.readByte()]; } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java index 4549858c2508b..569335cc65a5d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/Explanations.java @@ -79,9 +79,11 @@ public static final class Rebalance { activities. The shard will be rebalanced when those activities finish. Please wait."""; public static final String CANNOT_REBALANCE_CAN_ALLOCATE = """ - Elasticsearch is allowed to allocate this shard to another node but it isn't allowed to rebalance the shard there. 
If you \ - expect this shard to be rebalanced to another node, find this node in the node-by-node explanation and address the reasons \ - which prevent Elasticsearch from rebalancing this shard there."""; + Elasticsearch is allowed to allocate this shard on another node, and there is at least one node to which it could move this \ + shard that would improve the overall cluster balance, but it isn't allowed to rebalance this shard there. If you expect this \ + shard to be rebalanced to another node, check the cluster-wide rebalancing decisions and address any reasons preventing \ + Elasticsearch from rebalancing shards within the cluster, and then find the expected node in the node-by-node explanation and \ + address the reasons which prevent Elasticsearch from moving this shard there."""; public static final String CANNOT_REBALANCE_CANNOT_ALLOCATE = """ Elasticsearch is not allowed to allocate or rebalance this shard to another node. If you expect this shard to be rebalanced to \ diff --git a/server/src/main/java/org/elasticsearch/common/geo/LuceneGeometriesUtils.java b/server/src/main/java/org/elasticsearch/common/geo/LuceneGeometriesUtils.java new file mode 100644 index 0000000000000..c9d4b1c534fef --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/geo/LuceneGeometriesUtils.java @@ -0,0 +1,449 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.geo; + +import org.apache.lucene.geo.LatLonGeometry; +import org.apache.lucene.geo.XYGeometry; +import org.elasticsearch.geometry.Circle; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.GeometryCollection; +import org.elasticsearch.geometry.GeometryVisitor; +import org.elasticsearch.geometry.Line; +import org.elasticsearch.geometry.LinearRing; +import org.elasticsearch.geometry.MultiLine; +import org.elasticsearch.geometry.MultiPoint; +import org.elasticsearch.geometry.MultiPolygon; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Polygon; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.ShapeType; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Consumer; + +public class LuceneGeometriesUtils { + + interface Quantizer { + double quantizeLat(double lat); + + double quantizeLon(double lon); + + double[] quantizeLats(double[] lats); + + double[] quantizeLons(double[] lons); + } + + static final Quantizer NOOP_QUANTIZER = new Quantizer() { + @Override + public double quantizeLat(double lat) { + return lat; + } + + @Override + public double quantizeLon(double lon) { + return lon; + } + + @Override + public double[] quantizeLats(double[] lats) { + return lats; + } + + @Override + public double[] quantizeLons(double[] lons) { + return lons; + } + }; + + static Quantizer LATLON_QUANTIZER = new Quantizer() { + @Override + public double quantizeLat(double lat) { + return GeoUtils.quantizeLat(lat); + } + + @Override + public double quantizeLon(double lon) { + return GeoUtils.quantizeLon(lon); + } + + @Override + public double[] quantizeLats(double[] lats) { + return Arrays.stream(lats).map(this::quantizeLat).toArray(); + } + + @Override + public double[] quantizeLons(double[] lons) { + return Arrays.stream(lons).map(this::quantizeLon).toArray(); + } + }; + + /** + * Transform an 
Elasticsearch {@link Geometry} into a lucene {@link LatLonGeometry} + * + * @param geometry the geometry to transform + * @param quantize if true, the coordinates of the geometry will be quantized using lucene quantization. + * This is useful for queries so the latitude and longitude values to match the values on the index. + * @param checker call for every {@link ShapeType} found in the Geometry. It allows to throw an error if a geometry is + * not supported. + * + * @return an array of {@link LatLonGeometry} + */ + public static LatLonGeometry[] toLatLonGeometry(Geometry geometry, boolean quantize, Consumer checker) { + if (geometry == null || geometry.isEmpty()) { + return new LatLonGeometry[0]; + } + if (GeometryNormalizer.needsNormalize(Orientation.CCW, geometry)) { + // make geometry lucene friendly + geometry = GeometryNormalizer.apply(Orientation.CCW, geometry); + } + final List geometries = new ArrayList<>(); + final Quantizer quantizer = quantize ? LATLON_QUANTIZER : NOOP_QUANTIZER; + geometry.visit(new GeometryVisitor<>() { + @Override + public Void visit(Circle circle) { + checker.accept(ShapeType.CIRCLE); + if (circle.isEmpty() == false) { + geometries.add(toLatLonCircle(circle, quantizer)); + } + return null; + } + + @Override + public Void visit(GeometryCollection collection) { + checker.accept(ShapeType.GEOMETRYCOLLECTION); + if (collection.isEmpty() == false) { + for (org.elasticsearch.geometry.Geometry shape : collection) { + shape.visit(this); + } + } + return null; + } + + @Override + public Void visit(org.elasticsearch.geometry.Line line) { + checker.accept(ShapeType.LINESTRING); + if (line.isEmpty() == false) { + geometries.add(toLatLonLine(line, quantizer)); + } + return null; + } + + @Override + public Void visit(LinearRing ring) { + throw new IllegalArgumentException("Found an unsupported shape LinearRing"); + } + + @Override + public Void visit(MultiLine multiLine) { + checker.accept(ShapeType.MULTILINESTRING); + if (multiLine.isEmpty() == 
false) { + for (Line line : multiLine) { + visit(line); + } + } + return null; + } + + @Override + public Void visit(MultiPoint multiPoint) { + checker.accept(ShapeType.MULTIPOINT); + if (multiPoint.isEmpty() == false) { + for (Point point : multiPoint) { + visit(point); + } + } + return null; + } + + @Override + public Void visit(MultiPolygon multiPolygon) { + checker.accept(ShapeType.MULTIPOLYGON); + if (multiPolygon.isEmpty() == false) { + for (Polygon polygon : multiPolygon) { + visit(polygon); + } + } + return null; + } + + @Override + public Void visit(Point point) { + checker.accept(ShapeType.POINT); + if (point.isEmpty() == false) { + geometries.add(toLatLonPoint(point, quantizer)); + } + return null; + } + + @Override + public Void visit(org.elasticsearch.geometry.Polygon polygon) { + checker.accept(ShapeType.POLYGON); + if (polygon.isEmpty() == false) { + geometries.add(toLatLonPolygon(polygon, quantizer)); + } + return null; + } + + @Override + public Void visit(Rectangle r) { + checker.accept(ShapeType.ENVELOPE); + if (r.isEmpty() == false) { + geometries.add(toLatLonRectangle(r, quantizer)); + } + return null; + } + }); + return geometries.toArray(new LatLonGeometry[0]); + } + + /** + * Transform an Elasticsearch {@link Point} into a lucene {@link org.apache.lucene.geo.Point} + */ + public static org.apache.lucene.geo.Point toLatLonPoint(Point point) { + return toLatLonPoint(point, NOOP_QUANTIZER); + } + + private static org.apache.lucene.geo.Point toLatLonPoint(Point point, Quantizer quantizer) { + return new org.apache.lucene.geo.Point(quantizer.quantizeLat(point.getLat()), quantizer.quantizeLon(point.getLon())); + } + + /** + * Transform an Elasticsearch {@link Line} into a lucene {@link org.apache.lucene.geo.Line} + */ + public static org.apache.lucene.geo.Line toLatLonLine(Line line) { + return toLatLonLine(line, NOOP_QUANTIZER); + } + + private static org.apache.lucene.geo.Line toLatLonLine(Line line, Quantizer quantizer) { + return new 
org.apache.lucene.geo.Line(quantizer.quantizeLats(line.getLats()), quantizer.quantizeLons(line.getLons())); + } + + /** + * Transform an Elasticsearch {@link Polygon} into a lucene {@link org.apache.lucene.geo.Polygon} + */ + public static org.apache.lucene.geo.Polygon toLatLonPolygon(Polygon polygon) { + return toLatLonPolygon(polygon, NOOP_QUANTIZER); + } + + private static org.apache.lucene.geo.Polygon toLatLonPolygon(Polygon polygon, Quantizer quantizer) { + org.apache.lucene.geo.Polygon[] holes = new org.apache.lucene.geo.Polygon[polygon.getNumberOfHoles()]; + for (int i = 0; i < holes.length; i++) { + holes[i] = new org.apache.lucene.geo.Polygon( + quantizer.quantizeLats(polygon.getHole(i).getY()), + quantizer.quantizeLons(polygon.getHole(i).getX()) + ); + } + return new org.apache.lucene.geo.Polygon( + quantizer.quantizeLats(polygon.getPolygon().getY()), + quantizer.quantizeLons(polygon.getPolygon().getX()), + holes + ); + + } + + /** + * Transform an Elasticsearch {@link Rectangle} into a lucene {@link org.apache.lucene.geo.Rectangle} + */ + public static org.apache.lucene.geo.Rectangle toLatLonRectangle(Rectangle rectangle) { + return toLatLonRectangle(rectangle, NOOP_QUANTIZER); + } + + private static org.apache.lucene.geo.Rectangle toLatLonRectangle(Rectangle r, Quantizer quantizer) { + return new org.apache.lucene.geo.Rectangle( + quantizer.quantizeLat(r.getMinLat()), + quantizer.quantizeLat(r.getMaxLat()), + quantizer.quantizeLon(r.getMinLon()), + quantizer.quantizeLon(r.getMaxLon()) + ); + } + + /** + * Transform an Elasticsearch {@link Circle} into a lucene {@link org.apache.lucene.geo.Circle} + */ + public static org.apache.lucene.geo.Circle toLatLonCircle(Circle circle) { + return toLatLonCircle(circle, NOOP_QUANTIZER); + } + + private static org.apache.lucene.geo.Circle toLatLonCircle(Circle circle, Quantizer quantizer) { + return new org.apache.lucene.geo.Circle( + quantizer.quantizeLat(circle.getLat()), + quantizer.quantizeLon(circle.getLon()), 
+ circle.getRadiusMeters() + ); + } + + /** + * Transform an Elasticsearch {@link Geometry} into a lucene {@link XYGeometry} + * + * @param geometry the geometry to transform. + * @param checker call for every {@link ShapeType} found in the Geometry. It allows to throw an error if + * a geometry is not supported. + * @return an array of {@link XYGeometry} + */ + public static XYGeometry[] toXYGeometry(Geometry geometry, Consumer checker) { + if (geometry == null || geometry.isEmpty()) { + return new XYGeometry[0]; + } + final List geometries = new ArrayList<>(); + geometry.visit(new GeometryVisitor<>() { + @Override + public Void visit(Circle circle) { + checker.accept(ShapeType.CIRCLE); + if (circle.isEmpty() == false) { + geometries.add(toXYCircle(circle)); + } + return null; + } + + @Override + public Void visit(GeometryCollection collection) { + checker.accept(ShapeType.GEOMETRYCOLLECTION); + if (collection.isEmpty() == false) { + for (org.elasticsearch.geometry.Geometry shape : collection) { + shape.visit(this); + } + } + return null; + } + + @Override + public Void visit(org.elasticsearch.geometry.Line line) { + checker.accept(ShapeType.LINESTRING); + if (line.isEmpty() == false) { + geometries.add(toXYLine(line)); + } + return null; + } + + @Override + public Void visit(LinearRing ring) { + throw new IllegalArgumentException("Found an unsupported shape LinearRing"); + } + + @Override + public Void visit(MultiLine multiLine) { + checker.accept(ShapeType.MULTILINESTRING); + if (multiLine.isEmpty() == false) { + for (Line line : multiLine) { + visit(line); + } + } + return null; + } + + @Override + public Void visit(MultiPoint multiPoint) { + checker.accept(ShapeType.MULTIPOINT); + if (multiPoint.isEmpty() == false) { + for (Point point : multiPoint) { + visit(point); + } + } + return null; + } + + @Override + public Void visit(MultiPolygon multiPolygon) { + checker.accept(ShapeType.MULTIPOLYGON); + if (multiPolygon.isEmpty() == false) { + for (Polygon polygon 
: multiPolygon) { + visit(polygon); + } + } + return null; + } + + @Override + public Void visit(Point point) { + checker.accept(ShapeType.POINT); + if (point.isEmpty() == false) { + geometries.add(toXYPoint(point)); + } + return null; + } + + @Override + public Void visit(org.elasticsearch.geometry.Polygon polygon) { + checker.accept(ShapeType.POLYGON); + if (polygon.isEmpty() == false) { + geometries.add(toXYPolygon(polygon)); + } + return null; + } + + @Override + public Void visit(Rectangle r) { + checker.accept(ShapeType.ENVELOPE); + if (r.isEmpty() == false) { + geometries.add(toXYRectangle(r)); + } + return null; + } + }); + return geometries.toArray(new XYGeometry[0]); + } + + /** + * Transform an Elasticsearch {@link Point} into a lucene {@link org.apache.lucene.geo.XYPoint} + */ + public static org.apache.lucene.geo.XYPoint toXYPoint(Point point) { + return new org.apache.lucene.geo.XYPoint((float) point.getX(), (float) point.getY()); + } + + /** + * Transform an Elasticsearch {@link Line} into a lucene {@link org.apache.lucene.geo.XYLine} + */ + public static org.apache.lucene.geo.XYLine toXYLine(Line line) { + return new org.apache.lucene.geo.XYLine(doubleArrayToFloatArray(line.getX()), doubleArrayToFloatArray(line.getY())); + } + + /** + * Transform an Elasticsearch {@link Polygon} into a lucene {@link org.apache.lucene.geo.XYPolygon} + */ + public static org.apache.lucene.geo.XYPolygon toXYPolygon(Polygon polygon) { + org.apache.lucene.geo.XYPolygon[] holes = new org.apache.lucene.geo.XYPolygon[polygon.getNumberOfHoles()]; + for (int i = 0; i < holes.length; i++) { + holes[i] = new org.apache.lucene.geo.XYPolygon( + doubleArrayToFloatArray(polygon.getHole(i).getX()), + doubleArrayToFloatArray(polygon.getHole(i).getY()) + ); + } + return new org.apache.lucene.geo.XYPolygon( + doubleArrayToFloatArray(polygon.getPolygon().getX()), + doubleArrayToFloatArray(polygon.getPolygon().getY()), + holes + ); + } + + /** + * Transform an Elasticsearch {@link 
Rectangle} into a lucene {@link org.apache.lucene.geo.XYRectangle} + */ + public static org.apache.lucene.geo.XYRectangle toXYRectangle(Rectangle r) { + return new org.apache.lucene.geo.XYRectangle((float) r.getMinX(), (float) r.getMaxX(), (float) r.getMinY(), (float) r.getMaxY()); + } + + /** + * Transform an Elasticsearch {@link Circle} into a lucene {@link org.apache.lucene.geo.XYCircle} + */ + public static org.apache.lucene.geo.XYCircle toXYCircle(Circle circle) { + return new org.apache.lucene.geo.XYCircle((float) circle.getX(), (float) circle.getY(), (float) circle.getRadiusMeters()); + } + + static float[] doubleArrayToFloatArray(double[] array) { + float[] result = new float[array.length]; + for (int i = 0; i < array.length; ++i) { + result[i] = (float) array[i]; + } + return result; + } + + private LuceneGeometriesUtils() {} +} diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index f4edb8b1d4039..706a6ec8ccf02 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -143,6 +143,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder.value(id); } + /** + * Returns a string representing the Elasticsearch release version of this index version, + * if applicable for this deployment, otherwise the raw version number. 
+ */ + public String toReleaseVersion() { + return IndexVersions.VERSION_LOOKUP.apply(id); + } + @Override public String toString() { return Integer.toString(id); diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index f4be80513a553..1fd64671a53d7 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -97,9 +97,11 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion NORI_DUPLICATES = def(8_500_007, Version.LUCENE_9_9_0); public static final IndexVersion UPGRADE_LUCENE_9_9_1 = def(8_500_008, Version.LUCENE_9_9_1); public static final IndexVersion ES_VERSION_8_12_1 = def(8_500_009, Version.LUCENE_9_9_1); + public static final IndexVersion UPGRADE_8_12_1_LUCENE_9_9_2 = def(8_500_010, Version.LUCENE_9_9_2); public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_00_0, Version.LUCENE_9_9_1); + public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_00_0, Version.LUCENE_9_9_2); - public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_502_00_0, Version.LUCENE_9_10_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! 
No, really, @@ -209,9 +211,8 @@ static Collection getAllVersions() { return VERSION_IDS.values(); } - private static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(IndexVersions.class); + static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(IndexVersions.class); - public static String toReleaseVersion(IndexVersion version) { - return VERSION_LOOKUP.apply(version.id()); - } + // no instance + private IndexVersions() {} } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 842ec5ba3b467..c3f324fc49e82 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -811,7 +811,7 @@ private GetResult getFromTranslog( index, mappingLookup, documentParser, - config().getAnalyzer(), + config(), translogInMemorySegmentsCount::incrementAndGet ); final Engine.Searcher searcher = new Engine.Searcher( diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index ab84166701c59..2d0c3e8bc1feb 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.engine; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.BaseTermsEnum; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; @@ -83,10 +82,10 @@ final class TranslogDirectoryReader extends DirectoryReader { Translog.Index operation, MappingLookup mappingLookup, DocumentParser documentParser, - Analyzer analyzer, + EngineConfig engineConfig, Runnable onSegmentCreated ) throws IOException { - this(new 
TranslogLeafReader(shardId, operation, mappingLookup, documentParser, analyzer, onSegmentCreated)); + this(new TranslogLeafReader(shardId, operation, mappingLookup, documentParser, engineConfig, onSegmentCreated)); } private TranslogDirectoryReader(TranslogLeafReader leafReader) throws IOException { @@ -208,7 +207,7 @@ private static class TranslogLeafReader extends LeafReader { private final Translog.Index operation; private final MappingLookup mappingLookup; private final DocumentParser documentParser; - private final Analyzer analyzer; + private final EngineConfig engineConfig; private final Directory directory; private final Runnable onSegmentCreated; @@ -220,14 +219,14 @@ private static class TranslogLeafReader extends LeafReader { Translog.Index operation, MappingLookup mappingLookup, DocumentParser documentParser, - Analyzer analyzer, + EngineConfig engineConfig, Runnable onSegmentCreated ) { this.shardId = shardId; this.operation = operation; this.mappingLookup = mappingLookup; this.documentParser = documentParser; - this.analyzer = analyzer; + this.engineConfig = engineConfig; this.onSegmentCreated = onSegmentCreated; this.directory = new ByteBuffersDirectory(); this.uid = Uid.encodeId(operation.id()); @@ -267,7 +266,10 @@ private LeafReader createInMemoryLeafReader() { parsedDocs.updateSeqID(operation.seqNo(), operation.primaryTerm()); parsedDocs.version().setLongValue(operation.version()); - final IndexWriterConfig writeConfig = new IndexWriterConfig(analyzer).setOpenMode(IndexWriterConfig.OpenMode.CREATE); + // To guarantee indexability, we configure the analyzer and codec using the main engine configuration + final IndexWriterConfig writeConfig = new IndexWriterConfig(engineConfig.getAnalyzer()).setOpenMode( + IndexWriterConfig.OpenMode.CREATE + ).setCodec(engineConfig.getCodec()); try (IndexWriter writer = new IndexWriter(directory, writeConfig)) { writer.addDocument(parsedDocs.rootDoc()); final DirectoryReader reader = open(writer); diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java index 7ec9ec4fd947f..23879282799ab 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.GeometryNormalizer; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; @@ -94,7 +95,7 @@ public Void visit(GeometryCollection collection) { @Override public Void visit(Line line) { - addFields(LatLonShape.createIndexableFields(name, toLuceneLine(line))); + addFields(LatLonShape.createIndexableFields(name, LuceneGeometriesUtils.toLatLonLine(line))); return null; } @@ -135,7 +136,7 @@ public Void visit(Point point) { @Override public Void visit(Polygon polygon) { - addFields(LatLonShape.createIndexableFields(name, toLucenePolygon(polygon), true)); + addFields(LatLonShape.createIndexableFields(name, LuceneGeometriesUtils.toLatLonPolygon(polygon), true)); return null; } @@ -199,22 +200,10 @@ private void addFields(IndexableField[] fields) { } } - private static org.apache.lucene.geo.Polygon toLucenePolygon(Polygon polygon) { - org.apache.lucene.geo.Polygon[] holes = new org.apache.lucene.geo.Polygon[polygon.getNumberOfHoles()]; - for (int i = 0; i < holes.length; i++) { - holes[i] = new org.apache.lucene.geo.Polygon(polygon.getHole(i).getY(), polygon.getHole(i).getX()); - } - return new org.apache.lucene.geo.Polygon(polygon.getPolygon().getY(), polygon.getPolygon().getX(), holes); - } - private static org.apache.lucene.geo.Polygon toLucenePolygon(Rectangle r) { return new org.apache.lucene.geo.Polygon( new double[] { r.getMinLat(), 
r.getMinLat(), r.getMaxLat(), r.getMaxLat(), r.getMinLat() }, new double[] { r.getMinLon(), r.getMaxLon(), r.getMaxLon(), r.getMinLon(), r.getMinLon() } ); } - - private static org.apache.lucene.geo.Line toLuceneLine(Line line) { - return new org.apache.lucene.geo.Line(line.getLats(), line.getLons()); - } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java index beb594d9e9936..3947f009f1aec 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java @@ -8,32 +8,18 @@ package org.elasticsearch.index.mapper; -import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.geo.LatLonGeometry; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.common.geo.GeometryNormalizer; -import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; -import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.geometry.GeometryCollection; -import org.elasticsearch.geometry.GeometryVisitor; -import org.elasticsearch.geometry.Line; -import org.elasticsearch.geometry.LinearRing; -import org.elasticsearch.geometry.MultiLine; -import org.elasticsearch.geometry.MultiPoint; -import org.elasticsearch.geometry.MultiPolygon; -import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.Polygon; -import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; +import 
java.util.function.Consumer; /** * Implemented by {@link org.elasticsearch.index.mapper.MappedFieldType} that support @@ -43,10 +29,18 @@ public interface GeoShapeQueryable { Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... luceneGeometries); - default Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, Geometry shape) { + default Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, Geometry geometry) { + final Consumer checker = relation == ShapeRelation.WITHIN ? t -> { + if (t == ShapeType.LINESTRING) { + // Line geometries and WITHIN relation is not supported by Lucene. Throw an error here + // to have same behavior for runtime fields. + throw new IllegalArgumentException("found an unsupported shape Line"); + } + } : t -> {}; final LatLonGeometry[] luceneGeometries; try { - luceneGeometries = toQuantizeLuceneGeometry(shape, relation); + // quantize the geometries to match the values on the index + luceneGeometries = LuceneGeometriesUtils.toLatLonGeometry(geometry, true, checker); } catch (IllegalArgumentException e) { throw new QueryShardException(context, "Exception creating query on Field [" + fieldName + "] " + e.getMessage(), e); } @@ -66,157 +60,4 @@ default Query geoShapeQuery( ) { return geoShapeQuery(context, fieldName, relation, shape); } - - private static double quantizeLat(double lat) { - return GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); - } - - private static double[] quantizeLats(double[] lats) { - return Arrays.stream(lats).map(GeoShapeQueryable::quantizeLat).toArray(); - } - - private static double quantizeLon(double lon) { - return GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon)); - } - - private static double[] quantizeLons(double[] lons) { - return Arrays.stream(lons).map(GeoShapeQueryable::quantizeLon).toArray(); - } - - /** - * transforms an 
Elasticsearch {@link Geometry} into a lucene {@link LatLonGeometry} and quantize - * the latitude and longitude values to match the values on the index. - */ - static LatLonGeometry[] toQuantizeLuceneGeometry(Geometry geometry, ShapeRelation relation) { - if (geometry == null) { - return new LatLonGeometry[0]; - } - if (GeometryNormalizer.needsNormalize(Orientation.CCW, geometry)) { - // make geometry lucene friendly - geometry = GeometryNormalizer.apply(Orientation.CCW, geometry); - } - if (geometry.isEmpty()) { - return new LatLonGeometry[0]; - } - final List geometries = new ArrayList<>(); - geometry.visit(new GeometryVisitor<>() { - @Override - public Void visit(Circle circle) { - if (circle.isEmpty() == false) { - geometries.add( - new org.apache.lucene.geo.Circle( - quantizeLat(circle.getLat()), - quantizeLon(circle.getLon()), - circle.getRadiusMeters() - ) - ); - } - return null; - } - - @Override - public Void visit(GeometryCollection collection) { - if (collection.isEmpty() == false) { - for (Geometry shape : collection) { - shape.visit(this); - } - } - return null; - } - - @Override - public Void visit(org.elasticsearch.geometry.Line line) { - if (line.isEmpty() == false) { - if (relation == ShapeRelation.WITHIN) { - // Line geometries and WITHIN relation is not supported by Lucene. Throw an error here - // to have same behavior for runtime fields. 
- throw new IllegalArgumentException("found an unsupported shape Line"); - } - geometries.add(new org.apache.lucene.geo.Line(quantizeLats(line.getLats()), quantizeLons(line.getLons()))); - } - return null; - } - - @Override - public Void visit(LinearRing ring) { - throw new IllegalArgumentException("Found an unsupported shape LinearRing"); - } - - @Override - public Void visit(MultiLine multiLine) { - if (multiLine.isEmpty() == false) { - for (Line line : multiLine) { - visit(line); - } - } - return null; - } - - @Override - public Void visit(MultiPoint multiPoint) { - if (multiPoint.isEmpty() == false) { - for (Point point : multiPoint) { - visit(point); - } - } - return null; - } - - @Override - public Void visit(MultiPolygon multiPolygon) { - if (multiPolygon.isEmpty() == false) { - for (Polygon polygon : multiPolygon) { - visit(polygon); - } - } - return null; - } - - @Override - public Void visit(Point point) { - if (point.isEmpty() == false) { - geometries.add(new org.apache.lucene.geo.Point(quantizeLat(point.getLat()), quantizeLon(point.getLon()))); - } - return null; - - } - - @Override - public Void visit(org.elasticsearch.geometry.Polygon polygon) { - if (polygon.isEmpty() == false) { - org.apache.lucene.geo.Polygon[] holes = new org.apache.lucene.geo.Polygon[polygon.getNumberOfHoles()]; - for (int i = 0; i < holes.length; i++) { - holes[i] = new org.apache.lucene.geo.Polygon( - quantizeLats(polygon.getHole(i).getY()), - quantizeLons(polygon.getHole(i).getX()) - ); - } - geometries.add( - new org.apache.lucene.geo.Polygon( - quantizeLats(polygon.getPolygon().getY()), - quantizeLons(polygon.getPolygon().getX()), - holes - ) - ); - } - return null; - } - - @Override - public Void visit(Rectangle r) { - if (r.isEmpty() == false) { - geometries.add( - new org.apache.lucene.geo.Rectangle( - quantizeLat(r.getMinLat()), - quantizeLat(r.getMaxLat()), - quantizeLon(r.getMinLon()), - quantizeLon(r.getMaxLon()) - ) - ); - } - return null; - } - }); - return 
geometries.toArray(new LatLonGeometry[0]); - } } diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 235de51d22572..fdeb32de33877 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -78,7 +78,13 @@ default void init(Client client) {} * @param taskSettings Settings in the request to override the model's defaults * @param listener Inference result listener */ - void infer(Model model, List input, Map taskSettings, ActionListener listener); + void infer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ); /** * Start or prepare the model for use. diff --git a/server/src/main/java/org/elasticsearch/inference/InputType.java b/server/src/main/java/org/elasticsearch/inference/InputType.java index ffc67995c1dda..19f28601409ac 100644 --- a/server/src/main/java/org/elasticsearch/inference/InputType.java +++ b/server/src/main/java/org/elasticsearch/inference/InputType.java @@ -15,9 +15,8 @@ */ public enum InputType { INGEST, - SEARCH; - - public static String NAME = "input_type"; + SEARCH, + UNSPECIFIED; @Override public String toString() { diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 1dae328752bdc..0795fef891f91 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -974,7 +974,7 @@ record PluginServiceInstances( repositoryService ); - final TimeValue metricsInterval = settings.getAsTime("tracing.apm.agent.metrics_interval", TimeValue.timeValueSeconds(10)); + final TimeValue metricsInterval = settings.getAsTime("telemetry.agent.metrics_interval", TimeValue.timeValueSeconds(10)); final NodeMetrics 
nodeMetrics = new NodeMetrics(telemetryProvider.getMeterRegistry(), nodeService, metricsInterval); final SearchService searchService = serviceProvider.newSearchService( diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index d5b2565187a3f..3b053e80d35b7 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -1059,7 +1059,7 @@ protected SearchContext createContext( return context; } - public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) throws IOException { + public SearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) throws IOException { final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); final IndexShard indexShard = indexService.getShard(request.shardId().getId()); final Engine.SearcherSupplier reader = indexShard.acquireSearcherSupplier(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java deleted file mode 100644 index 3e15488cc430b..0000000000000 --- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.search.aggregations; - -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.xcontent.ToXContentFragment; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; - -import static java.util.Collections.emptyMap; -import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject; - -/** - * Represents a set of {@link Aggregation}s - */ -public class Aggregations implements Iterable, ToXContentFragment { - - public static final String AGGREGATIONS_FIELD = "aggregations"; - - protected final List aggregations; - private Map aggregationsAsMap; - - public Aggregations(List aggregations) { - this.aggregations = aggregations; - if (aggregations.isEmpty()) { - aggregationsAsMap = emptyMap(); - } - } - - /** - * Iterates over the {@link Aggregation}s. - */ - @Override - public final Iterator iterator() { - return Iterators.map(aggregations.iterator(), p -> (Aggregation) p); - } - - /** - * The list of {@link Aggregation}s. - */ - public final List asList() { - return Collections.unmodifiableList(aggregations); - } - - /** - * Returns the {@link Aggregation}s keyed by aggregation name. - */ - public final Map asMap() { - return getAsMap(); - } - - /** - * Returns the {@link Aggregation}s keyed by aggregation name. 
- */ - public final Map getAsMap() { - if (aggregationsAsMap == null) { - Map newAggregationsAsMap = Maps.newMapWithExpectedSize(aggregations.size()); - for (Aggregation aggregation : aggregations) { - newAggregationsAsMap.put(aggregation.getName(), aggregation); - } - this.aggregationsAsMap = unmodifiableMap(newAggregationsAsMap); - } - return aggregationsAsMap; - } - - /** - * Returns the aggregation that is associated with the specified name. - */ - @SuppressWarnings("unchecked") - public final A get(String name) { - return (A) asMap().get(name); - } - - @Override - public final boolean equals(Object obj) { - if (obj == null || getClass() != obj.getClass()) { - return false; - } - return aggregations.equals(((Aggregations) obj).aggregations); - } - - @Override - public final int hashCode() { - return Objects.hash(getClass(), aggregations); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (aggregations.isEmpty()) { - return builder; - } - builder.startObject(AGGREGATIONS_FIELD); - toXContentInternal(builder, params); - return builder.endObject(); - } - - /** - * Directly write all the aggregations without their bounding object. 
Used by sub-aggregations (non top level aggs) - */ - public XContentBuilder toXContentInternal(XContentBuilder builder, Params params) throws IOException { - for (Aggregation aggregation : aggregations) { - aggregation.toXContent(builder, params); - } - return builder; - } - - public static Aggregations fromXContent(XContentParser parser) throws IOException { - final List aggregations = new ArrayList<>(); - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.START_OBJECT) { - SetOnce typedAgg = new SetOnce<>(); - String currentField = parser.currentName(); - parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, typedAgg::set); - if (typedAgg.get() != null) { - aggregations.add(typedAgg.get()); - } else { - throw new ParsingException( - parser.getTokenLocation(), - String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField) - ); - } - } - } - return new Aggregations(aggregations); - } -} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index c1c54f80987f0..0c299bce7c29d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -7,32 +7,45 @@ */ package org.elasticsearch.search.aggregations; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import 
org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.search.sort.SortValue; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; +import static java.util.Collections.unmodifiableList; +import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject; + /** - * An internal implementation of {@link Aggregations}. + * Represents a set of {@link InternalAggregation}s */ -public final class InternalAggregations extends Aggregations implements Writeable { +public final class InternalAggregations implements Iterable, ToXContentFragment, Writeable { + + public static final String AGGREGATIONS_FIELD = "aggregations"; - public static final InternalAggregations EMPTY = new InternalAggregations(Collections.emptyList()); + public static final InternalAggregations EMPTY = new InternalAggregations(List.of()); private static final Comparator INTERNAL_AGG_COMPARATOR = (agg1, agg2) -> { if (agg1.canLeadReduction() == agg2.canLeadReduction()) { @@ -44,11 +57,115 @@ public final class InternalAggregations extends Aggregations implements Writeabl } }; + private final List aggregations; + private Map aggregationsAsMap; + /** * Constructs a new aggregation. 
*/ private InternalAggregations(List aggregations) { - super(aggregations); + this.aggregations = aggregations; + if (aggregations.isEmpty()) { + aggregationsAsMap = Map.of(); + } + } + + /** + * Iterates over the {@link InternalAggregation}s. + */ + @Override + public Iterator iterator() { + return aggregations.iterator(); + } + + /** + * The list of {@link InternalAggregation}s. + */ + public List asList() { + return unmodifiableList(aggregations); + } + + /** + * Returns the {@link InternalAggregation}s keyed by aggregation name. + */ + public Map asMap() { + return getAsMap(); + } + + /** + * Returns the {@link InternalAggregation}s keyed by aggregation name. + */ + public Map getAsMap() { + if (aggregationsAsMap == null) { + Map newAggregationsAsMap = Maps.newMapWithExpectedSize(aggregations.size()); + for (InternalAggregation aggregation : aggregations) { + newAggregationsAsMap.put(aggregation.getName(), aggregation); + } + this.aggregationsAsMap = unmodifiableMap(newAggregationsAsMap); + } + return aggregationsAsMap; + } + + /** + * Returns the aggregation that is associated with the specified name. + */ + @SuppressWarnings("unchecked") + public A get(String name) { + return (A) asMap().get(name); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + return aggregations.equals(((InternalAggregations) obj).aggregations); + } + + @Override + public int hashCode() { + return Objects.hash(getClass(), aggregations); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (aggregations.isEmpty()) { + return builder; + } + builder.startObject(AGGREGATIONS_FIELD); + toXContentInternal(builder, params); + return builder.endObject(); + } + + /** + * Directly write all the aggregations without their bounding object. 
Used by sub-aggregations (non top level aggs) + */ + public XContentBuilder toXContentInternal(XContentBuilder builder, Params params) throws IOException { + for (InternalAggregation aggregation : aggregations) { + aggregation.toXContent(builder, params); + } + return builder; + } + + public static InternalAggregations fromXContent(XContentParser parser) throws IOException { + final List aggregations = new ArrayList<>(); + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_OBJECT) { + SetOnce typedAgg = new SetOnce<>(); + String currentField = parser.currentName(); + parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set); + if (typedAgg.get() != null) { + aggregations.add(typedAgg.get()); + } else { + throw new ParsingException( + parser.getTokenLocation(), + String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField) + ); + } + } + } + return new InternalAggregations(aggregations); } public static InternalAggregations from(List aggregations) { @@ -74,9 +191,8 @@ public List copyResults() { return new ArrayList<>(getInternalAggregations()); } - @SuppressWarnings("unchecked") private List getInternalAggregations() { - return (List) aggregations; + return aggregations; } /** @@ -138,12 +254,12 @@ public static InternalAggregations reduce(List aggregation // first we collect all aggregations of the same type and list them together Map> aggByName = new HashMap<>(); for (InternalAggregations aggregations : aggregationsList) { - for (Aggregation aggregation : aggregations.aggregations) { + for (InternalAggregation aggregation : aggregations.aggregations) { List aggs = aggByName.computeIfAbsent( aggregation.getName(), k -> new ArrayList<>(aggregationsList.size()) ); - aggs.add((InternalAggregation) aggregation); + aggs.add(aggregation); } } @@ -173,9 +289,7 @@ public static InternalAggregations 
reduce(List aggregation */ public static InternalAggregations finalizeSampling(InternalAggregations internalAggregations, SamplingContext samplingContext) { return from( - internalAggregations.aggregations.stream() - .map(agg -> ((InternalAggregation) agg).finalizeSampling(samplingContext)) - .collect(Collectors.toList()) + internalAggregations.aggregations.stream().map(agg -> agg.finalizeSampling(samplingContext)).collect(Collectors.toList()) ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index 4d519d678d96b..dda632e7aa020 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -211,7 +211,7 @@ public Object getProperty(String containingAggName, List path) { if (path.isEmpty()) { return this; } - Aggregations aggregations = getAggregations(); + InternalAggregations aggregations = getAggregations(); String aggName = path.get(0); if (aggName.equals("_count")) { if (path.size() > 1) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java index b444a1ef8f4d7..5b72b1396def2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java @@ -85,9 +85,7 @@ public InternalAggregation reduce(List aggregations, Aggreg InternalAggregations aggs = InternalAggregations.reduce(subAggregationsList, reduceContext); if (reduceContext.isFinalReduce() && aggs != null) { SamplingContext context = buildContext(); - aggs = 
InternalAggregations.from( - aggs.asList().stream().map(agg -> ((InternalAggregation) agg).finalizeSampling(context)).toList() - ); + aggs = InternalAggregations.from(aggs.asList().stream().map(agg -> agg.finalizeSampling(context)).toList()); } return newAggregation(getName(), docCount, aggs); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java index 5661edce6eb89..59317944930ec 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java @@ -12,8 +12,8 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; @@ -46,7 +46,7 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg } @Override - public final InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context) { + public final InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context) { preCollection(); List parsedPath = AggregationPath.parse(bucketsPaths()[0]).getPathElements(); for (Aggregation aggregation : aggregations) { diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java index c5e52448223c0..7f18b87adce3e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java @@ -80,7 +80,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe newBuckets.add(bucket); } else { final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toCollection(ArrayList::new)); InternalSimpleValue simpleValue = new InternalSimpleValue(name(), returned.doubleValue(), formatter, metadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java index c51c60bf24ee5..2e2c46ac0b38a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java @@ -54,7 +54,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe } List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toCollection(ArrayList::new)); aggs.add(new InternalSimpleValue(name(), sum, formatter, metadata())); Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java index 7225d7652b3b8..c7eb662efebd5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java @@ -85,7 +85,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe double diff = thisBucketValue - lagValue; List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toCollection(ArrayList::new)); aggs.add(new InternalSimpleValue(name(), diff, formatter, metadata())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java index 9c63e13afa039..7b82cd38881df 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -30,5 +29,5 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe }); } - public abstract InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context); + public abstract InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context); } diff 
--git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java new file mode 100644 index 0000000000000..8843801e528a3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; + +public class BulkRequestBuilderTests extends ESTestCase { + + public void testValidation() { + BulkRequestBuilder bulkRequestBuilder = new BulkRequestBuilder(null, null); + bulkRequestBuilder.add(new IndexRequestBuilder(null, randomAlphaOfLength(10))); + bulkRequestBuilder.add(new IndexRequest()); + expectThrows(IllegalStateException.class, bulkRequestBuilder::request); + + bulkRequestBuilder = new BulkRequestBuilder(null, null); + bulkRequestBuilder.add(new IndexRequestBuilder(null, randomAlphaOfLength(10))); + bulkRequestBuilder.setTimeout(randomTimeValue()); + bulkRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); + expectThrows(IllegalStateException.class, bulkRequestBuilder::request); + + bulkRequestBuilder = new BulkRequestBuilder(null, null); + bulkRequestBuilder.add(new IndexRequestBuilder(null, randomAlphaOfLength(10))); + bulkRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values()).getValue()); + 
bulkRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); + expectThrows(IllegalStateException.class, bulkRequestBuilder::request); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java index e2f67d9387ff5..9af522524abc9 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java @@ -53,16 +53,20 @@ public void testSetSource() throws Exception { indexRequestBuilder.setSource(source); assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + indexRequestBuilder = new IndexRequestBuilder(this.testClient); indexRequestBuilder.setSource(source, XContentType.JSON); assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + indexRequestBuilder = new IndexRequestBuilder(this.testClient); indexRequestBuilder.setSource("SomeKey", "SomeValue"); assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); // force the Object... 
setter + indexRequestBuilder = new IndexRequestBuilder(this.testClient); indexRequestBuilder.setSource((Object) "SomeKey", "SomeValue"); assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + indexRequestBuilder = new IndexRequestBuilder(this.testClient); ByteArrayOutputStream docOut = new ByteArrayOutputStream(); XContentBuilder doc = XContentFactory.jsonBuilder(docOut).startObject().field("SomeKey", "SomeValue").endObject(); doc.close(); @@ -72,6 +76,7 @@ public void testSetSource() throws Exception { XContentHelper.convertToJson(indexRequestBuilder.request().source(), true, indexRequestBuilder.request().getContentType()) ); + indexRequestBuilder = new IndexRequestBuilder(this.testClient); doc = XContentFactory.jsonBuilder().startObject().field("SomeKey", "SomeValue").endObject(); doc.close(); indexRequestBuilder.setSource(doc); diff --git a/server/src/test/java/org/elasticsearch/common/geo/LuceneGeometriesUtilsTests.java b/server/src/test/java/org/elasticsearch/common/geo/LuceneGeometriesUtilsTests.java new file mode 100644 index 0000000000000..96cc73e2cff4c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/geo/LuceneGeometriesUtilsTests.java @@ -0,0 +1,476 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.geo; + +import org.apache.lucene.geo.LatLonGeometry; +import org.apache.lucene.geo.XYGeometry; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geometry.Circle; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.GeometryCollection; +import org.elasticsearch.geometry.Line; +import org.elasticsearch.geometry.LinearRing; +import org.elasticsearch.geometry.MultiLine; +import org.elasticsearch.geometry.MultiPoint; +import org.elasticsearch.geometry.MultiPolygon; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Polygon; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.ShapeType; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class LuceneGeometriesUtilsTests extends ESTestCase { + + public void testLatLonPoint() { + Point point = GeometryTestUtils.randomPoint(); + { + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(point, false, t -> assertEquals(ShapeType.POINT, t)); + assertEquals(1, geometries.length); + assertLatLonPoint(point, geometries[0]); + } + { + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(point, true, t -> assertEquals(ShapeType.POINT, t)); + assertEquals(1, geometries.length); + assertLatLonPoint(quantize(point), geometries[0]); + } + } + + public void testLatLonMultiPoint() { + MultiPoint multiPoint = GeometryTestUtils.randomMultiPoint(randomBoolean()); + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(multiPoint, false, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTIPOINT, t); + } else { + assertEquals(ShapeType.POINT, t); + } + }); + assertEquals(multiPoint.size(), 
geometries.length); + for (int i = 0; i < multiPoint.size(); i++) { + assertLatLonPoint(multiPoint.get(i), geometries[i]); + } + } + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(multiPoint, true, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTIPOINT, t); + } else { + assertEquals(ShapeType.POINT, t); + } + }); + assertEquals(multiPoint.size(), geometries.length); + for (int i = 0; i < multiPoint.size(); i++) { + assertLatLonPoint(quantize(multiPoint.get(i)), geometries[i]); + } + } + } + + private void assertLatLonPoint(Point point, LatLonGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.Point.class)); + org.apache.lucene.geo.Point lalonPoint = (org.apache.lucene.geo.Point) geometry; + assertThat(lalonPoint.getLon(), equalTo(point.getLon())); + assertThat(lalonPoint.getLat(), equalTo(point.getLat())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toLatLonPoint(point))); + } + + public void testXYPoint() { + Point point = ShapeTestUtils.randomPoint(); + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(point, t -> assertEquals(ShapeType.POINT, t)); + assertEquals(1, geometries.length); + assertXYPoint(point, geometries[0]); + assertThat(geometries[0], instanceOf(org.apache.lucene.geo.XYPoint.class)); + } + + public void testXYMultiPoint() { + MultiPoint multiPoint = ShapeTestUtils.randomMultiPoint(randomBoolean()); + int[] counter = new int[] { 0 }; + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(multiPoint, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTIPOINT, t); + } else { + assertEquals(ShapeType.POINT, t); + } + }); + assertEquals(multiPoint.size(), geometries.length); + for (int i = 0; i < multiPoint.size(); i++) { + assertXYPoint(multiPoint.get(i), geometries[i]); + } + } + + private void assertXYPoint(Point point, XYGeometry geometry) { + assertThat(geometry, 
instanceOf(org.apache.lucene.geo.XYPoint.class)); + org.apache.lucene.geo.XYPoint xyPoint = (org.apache.lucene.geo.XYPoint) geometry; + assertThat(xyPoint.getX(), equalTo((float) point.getX())); + assertThat(xyPoint.getY(), equalTo((float) point.getY())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toXYPoint(point))); + } + + public void testLatLonLine() { + Line line = GeometryTestUtils.randomLine(randomBoolean()); + { + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(line, false, t -> assertEquals(ShapeType.LINESTRING, t)); + assertEquals(1, geometries.length); + assertLatLonLine(line, geometries[0]); + } + { + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(line, true, t -> assertEquals(ShapeType.LINESTRING, t)); + assertEquals(1, geometries.length); + assertLatLonLine(quantize(line), geometries[0]); + } + } + + public void testLatLonMultiLine() { + MultiLine multiLine = GeometryTestUtils.randomMultiLine(randomBoolean()); + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(multiLine, false, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTILINESTRING, t); + } else { + assertEquals(ShapeType.LINESTRING, t); + } + }); + assertEquals(multiLine.size(), geometries.length); + for (int i = 0; i < multiLine.size(); i++) { + assertLatLonLine(multiLine.get(i), geometries[i]); + } + } + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(multiLine, true, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTILINESTRING, t); + } else { + assertEquals(ShapeType.LINESTRING, t); + } + }); + assertEquals(multiLine.size(), geometries.length); + for (int i = 0; i < multiLine.size(); i++) { + assertLatLonLine(quantize(multiLine.get(i)), geometries[i]); + } + } + } + + private void assertLatLonLine(Line line, LatLonGeometry geometry) { + assertThat(geometry, 
instanceOf(org.apache.lucene.geo.Line.class)); + org.apache.lucene.geo.Line lalonLine = (org.apache.lucene.geo.Line) geometry; + assertThat(lalonLine.getLons(), equalTo(line.getLons())); + assertThat(lalonLine.getLats(), equalTo(line.getLats())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toLatLonLine(line))); + } + + public void testXYLine() { + Line line = ShapeTestUtils.randomLine(randomBoolean()); + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(line, t -> assertEquals(ShapeType.LINESTRING, t)); + assertEquals(1, geometries.length); + assertXYLine(line, geometries[0]); + } + + public void testXYMultiLine() { + MultiLine multiLine = ShapeTestUtils.randomMultiLine(randomBoolean()); + int[] counter = new int[] { 0 }; + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(multiLine, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTILINESTRING, t); + } else { + assertEquals(ShapeType.LINESTRING, t); + } + }); + assertEquals(multiLine.size(), geometries.length); + for (int i = 0; i < multiLine.size(); i++) { + assertXYLine(multiLine.get(i), geometries[i]); + } + } + + private void assertXYLine(Line line, XYGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.XYLine.class)); + org.apache.lucene.geo.XYLine xyLine = (org.apache.lucene.geo.XYLine) geometry; + assertThat(xyLine.getX(), equalTo(LuceneGeometriesUtils.doubleArrayToFloatArray(line.getLons()))); + assertThat(xyLine.getY(), equalTo(LuceneGeometriesUtils.doubleArrayToFloatArray(line.getLats()))); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toXYLine(line))); + } + + public void testLatLonPolygon() { + Polygon polygon = validRandomPolygon(randomBoolean()); + { + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(polygon, false, t -> assertEquals(ShapeType.POLYGON, t)); + assertEquals(1, geometries.length); + assertLatLonPolygon(polygon, geometries[0]); + } + { + LatLonGeometry[] geometries = 
LuceneGeometriesUtils.toLatLonGeometry(polygon, true, t -> assertEquals(ShapeType.POLYGON, t)); + assertEquals(1, geometries.length); + assertLatLonPolygon(quantize(polygon), geometries[0]); + } + } + + public void testLatLonMultiPolygon() { + MultiPolygon multiPolygon = validRandomMultiPolygon(randomBoolean()); + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(multiPolygon, false, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTIPOLYGON, t); + } else { + assertEquals(ShapeType.POLYGON, t); + } + }); + assertEquals(multiPolygon.size(), geometries.length); + for (int i = 0; i < multiPolygon.size(); i++) { + assertLatLonPolygon(multiPolygon.get(i), geometries[i]); + } + } + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(multiPolygon, true, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTIPOLYGON, t); + } else { + assertEquals(ShapeType.POLYGON, t); + } + }); + assertEquals(multiPolygon.size(), geometries.length); + for (int i = 0; i < multiPolygon.size(); i++) { + assertLatLonPolygon(quantize(multiPolygon.get(i)), geometries[i]); + } + } + } + + private void assertLatLonPolygon(Polygon polygon, LatLonGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.Polygon.class)); + org.apache.lucene.geo.Polygon lalonPolygon = (org.apache.lucene.geo.Polygon) geometry; + assertThat(lalonPolygon.getPolyLons(), equalTo(polygon.getPolygon().getLons())); + assertThat(lalonPolygon.getPolyLats(), equalTo(polygon.getPolygon().getLats())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toLatLonPolygon(polygon))); + } + + public void testXYPolygon() { + Polygon polygon = ShapeTestUtils.randomPolygon(randomBoolean()); + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(polygon, t -> assertEquals(ShapeType.POLYGON, t)); + assertEquals(1, geometries.length); + assertXYPolygon(polygon, geometries[0]); + 
} + + public void testXYMultiPolygon() { + MultiPolygon multiPolygon = ShapeTestUtils.randomMultiPolygon(randomBoolean()); + int[] counter = new int[] { 0 }; + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(multiPolygon, t -> { + if (counter[0]++ == 0) { + assertEquals(ShapeType.MULTIPOLYGON, t); + } else { + assertEquals(ShapeType.POLYGON, t); + } + }); + assertEquals(multiPolygon.size(), geometries.length); + for (int i = 0; i < multiPolygon.size(); i++) { + assertXYPolygon(multiPolygon.get(i), geometries[i]); + } + } + + private void assertXYPolygon(Polygon polygon, XYGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.XYPolygon.class)); + org.apache.lucene.geo.XYPolygon xyPolygon = (org.apache.lucene.geo.XYPolygon) geometry; + assertThat(xyPolygon.getPolyX(), equalTo(LuceneGeometriesUtils.doubleArrayToFloatArray(polygon.getPolygon().getX()))); + assertThat(xyPolygon.getPolyY(), equalTo(LuceneGeometriesUtils.doubleArrayToFloatArray(polygon.getPolygon().getY()))); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toXYPolygon(polygon))); + } + + public void testLatLonGeometryCollection() { + boolean hasZ = randomBoolean(); + Point point = GeometryTestUtils.randomPoint(hasZ); + Line line = GeometryTestUtils.randomLine(hasZ); + Polygon polygon = validRandomPolygon(hasZ); + GeometryCollection geometryCollection = new GeometryCollection<>(List.of(point, line, polygon)); + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(geometryCollection, false, t -> { + if (counter[0] == 0) { + assertEquals(ShapeType.GEOMETRYCOLLECTION, t); + } else if (counter[0] == 1) { + assertEquals(ShapeType.POINT, t); + } else if (counter[0] == 2) { + assertEquals(ShapeType.LINESTRING, t); + } else if (counter[0] == 3) { + assertEquals(ShapeType.POLYGON, t); + } else { + fail("Unexpected counter value"); + } + counter[0]++; + }); + assertEquals(geometryCollection.size(), geometries.length); + 
assertLatLonPoint(point, geometries[0]); + assertLatLonLine(line, geometries[1]); + assertLatLonPolygon(polygon, geometries[2]); + } + { + int[] counter = new int[] { 0 }; + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(geometryCollection, true, t -> { + if (counter[0] == 0) { + assertEquals(ShapeType.GEOMETRYCOLLECTION, t); + } else if (counter[0] == 1) { + assertEquals(ShapeType.POINT, t); + } else if (counter[0] == 2) { + assertEquals(ShapeType.LINESTRING, t); + } else if (counter[0] == 3) { + assertEquals(ShapeType.POLYGON, t); + } else { + fail("Unexpected counter value"); + } + counter[0]++; + }); + assertEquals(geometryCollection.size(), geometries.length); + assertLatLonPoint(quantize(point), geometries[0]); + assertLatLonLine(quantize(line), geometries[1]); + assertLatLonPolygon(quantize(polygon), geometries[2]); + } + } + + public void testXYGeometryCollection() { + boolean hasZ = randomBoolean(); + Point point = ShapeTestUtils.randomPoint(hasZ); + Line line = ShapeTestUtils.randomLine(hasZ); + Polygon polygon = ShapeTestUtils.randomPolygon(hasZ); + GeometryCollection geometryCollection = new GeometryCollection<>(List.of(point, line, polygon)); + int[] counter = new int[] { 0 }; + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(geometryCollection, t -> { + if (counter[0] == 0) { + assertEquals(ShapeType.GEOMETRYCOLLECTION, t); + } else if (counter[0] == 1) { + assertEquals(ShapeType.POINT, t); + } else if (counter[0] == 2) { + assertEquals(ShapeType.LINESTRING, t); + } else if (counter[0] == 3) { + assertEquals(ShapeType.POLYGON, t); + } else { + fail("Unexpected counter value"); + } + counter[0]++; + }); + assertEquals(geometryCollection.size(), geometries.length); + assertXYPoint(point, geometries[0]); + assertXYLine(line, geometries[1]); + assertXYPolygon(polygon, geometries[2]); + } + + private Polygon validRandomPolygon(boolean hasLat) { + return randomValueOtherThanMany( + polygon -> 
GeometryNormalizer.needsNormalize(Orientation.CCW, polygon), + () -> GeometryTestUtils.randomPolygon(hasLat) + ); + } + + public void testLatLonRectangle() { + Rectangle rectangle = GeometryTestUtils.randomRectangle(); + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(rectangle, false, t -> assertEquals(ShapeType.ENVELOPE, t)); + assertEquals(1, geometries.length); + assertLatLonRectangle(rectangle, geometries[0]); + } + + private void assertLatLonRectangle(Rectangle rectangle, LatLonGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.Rectangle.class)); + org.apache.lucene.geo.Rectangle lalonRectangle = (org.apache.lucene.geo.Rectangle) geometry; + assertThat(lalonRectangle.maxLon, equalTo(rectangle.getMaxLon())); + assertThat(lalonRectangle.minLon, equalTo(rectangle.getMinLon())); + assertThat(lalonRectangle.maxLat, equalTo(rectangle.getMaxLat())); + assertThat(lalonRectangle.minLat, equalTo(rectangle.getMinLat())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toLatLonRectangle(rectangle))); + } + + public void testXYRectangle() { + Rectangle rectangle = ShapeTestUtils.randomRectangle(); + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(rectangle, t -> assertEquals(ShapeType.ENVELOPE, t)); + assertEquals(1, geometries.length); + assertXYRectangle(rectangle, geometries[0]); + } + + private void assertXYRectangle(Rectangle rectangle, XYGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.XYRectangle.class)); + org.apache.lucene.geo.XYRectangle xyRectangle = (org.apache.lucene.geo.XYRectangle) geometry; + assertThat(xyRectangle.maxX, equalTo((float) rectangle.getMaxX())); + assertThat(xyRectangle.minX, equalTo((float) rectangle.getMinX())); + assertThat(xyRectangle.maxY, equalTo((float) rectangle.getMaxY())); + assertThat(xyRectangle.minY, equalTo((float) rectangle.getMinY())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toXYRectangle(rectangle))); + } + + public void 
testLatLonCircle() { + Circle circle = GeometryTestUtils.randomCircle(randomBoolean()); + LatLonGeometry[] geometries = LuceneGeometriesUtils.toLatLonGeometry(circle, false, t -> assertEquals(ShapeType.CIRCLE, t)); + assertEquals(1, geometries.length); + assertLatLonCircle(circle, geometries[0]); + } + + private void assertLatLonCircle(Circle circle, LatLonGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.Circle.class)); + org.apache.lucene.geo.Circle lalonCircle = (org.apache.lucene.geo.Circle) geometry; + assertThat(lalonCircle.getLon(), equalTo(circle.getLon())); + assertThat(lalonCircle.getLat(), equalTo(circle.getLat())); + assertThat(lalonCircle.getRadius(), equalTo(circle.getRadiusMeters())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toLatLonCircle(circle))); + } + + public void testXYCircle() { + Circle circle = ShapeTestUtils.randomCircle(randomBoolean()); + XYGeometry[] geometries = LuceneGeometriesUtils.toXYGeometry(circle, t -> assertEquals(ShapeType.CIRCLE, t)); + assertEquals(1, geometries.length); + assertXYCircle(circle, geometries[0]); + } + + private void assertXYCircle(Circle circle, XYGeometry geometry) { + assertThat(geometry, instanceOf(org.apache.lucene.geo.XYCircle.class)); + org.apache.lucene.geo.XYCircle xyCircle = (org.apache.lucene.geo.XYCircle) geometry; + assertThat(xyCircle.getX(), equalTo((float) circle.getX())); + assertThat(xyCircle.getY(), equalTo((float) circle.getY())); + assertThat(xyCircle.getRadius(), equalTo((float) circle.getRadiusMeters())); + assertThat(geometry, equalTo(LuceneGeometriesUtils.toXYCircle(circle))); + } + + private MultiPolygon validRandomMultiPolygon(boolean hasLat) { + // make sure we don't generate a polygon that gets splitted across the dateline + return randomValueOtherThanMany( + multiPolygon -> GeometryNormalizer.needsNormalize(Orientation.CCW, multiPolygon), + () -> GeometryTestUtils.randomMultiPolygon(hasLat) + ); + } + + private Point quantize(Point point) { + 
return new Point(GeoUtils.quantizeLon(point.getLon()), GeoUtils.quantizeLat(point.getLat())); + } + + private Line quantize(Line line) { + return new Line( + LuceneGeometriesUtils.LATLON_QUANTIZER.quantizeLons(line.getLons()), + LuceneGeometriesUtils.LATLON_QUANTIZER.quantizeLats(line.getLats()) + ); + } + + private Polygon quantize(Polygon polygon) { + List holes = new ArrayList<>(polygon.getNumberOfHoles()); + for (int i = 0; i < polygon.getNumberOfHoles(); i++) { + holes.add(quantize(polygon.getHole(i))); + } + return new Polygon(quantize(polygon.getPolygon()), holes); + } + + private LinearRing quantize(LinearRing linearRing) { + return new LinearRing( + LuceneGeometriesUtils.LATLON_QUANTIZER.quantizeLons(linearRing.getLons()), + LuceneGeometriesUtils.LATLON_QUANTIZER.quantizeLats(linearRing.getLats()) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index 9e1be4c629b4a..20493ee576c0a 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Arrays; import java.util.function.LongSupplier; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; @@ -114,6 +115,20 @@ public void testGetFromTranslogWithSyntheticSource() throws IOException { runGetFromTranslogWithOptions(docToIndex, sourceOptions, expectedFetchedSource, "\"long\"", 7L, true); } + public void testGetFromTranslogWithDenseVector() throws IOException { + float[] vector = new float[2048]; + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + String docToIndex = Strings.format(""" + { + "bar": %s, + "foo": "foo" + } + """, Arrays.toString(vector)); + runGetFromTranslogWithOptions(docToIndex, 
"\"enabled\": true", docToIndex, "\"text\"", "foo", "\"dense_vector\"", false); + } + private void runGetFromTranslogWithOptions( String docToIndex, String sourceOptions, @@ -122,23 +137,48 @@ private void runGetFromTranslogWithOptions( Object expectedFooVal, boolean sourceOnlyFetchCreatesInMemoryReader ) throws IOException { - IndexMetadata metadata = IndexMetadata.builder("test").putMapping(Strings.format(""" - { - "properties": { - "foo": { - "type": %s, - "store": true - }, - "bar": { "type": %s } - }, - "_source": { %s } - } - }""", fieldType, fieldType, sourceOptions)).settings(indexSettings(IndexVersion.current(), 1, 1)).primaryTerm(0, 1).build(); + runGetFromTranslogWithOptions( + docToIndex, + sourceOptions, + expectedResult, + fieldType, + expectedFooVal, + fieldType, + sourceOnlyFetchCreatesInMemoryReader + ); + } + + private void runGetFromTranslogWithOptions( + String docToIndex, + String sourceOptions, + String expectedResult, + String fieldTypeFoo, + Object expectedFooVal, + String fieldTypeBar, + boolean sourceOnlyFetchCreatesInMemoryReader + ) throws IOException { + IndexMetadata metadata = IndexMetadata.builder("test") + .putMapping(Strings.format(""" + { + "properties": { + "foo": { + "type": %s, + "store": true + }, + "bar": { "type": %s } + }, + "_source": { %s } + } + }""", fieldTypeFoo, fieldTypeBar, sourceOptions)) + .settings(indexSettings(IndexVersion.current(), 1, 1)) + .primaryTerm(0, 1) + .build(); IndexShard primary = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, EngineTestCase.randomReaderWrapper()); recoverShardFromStore(primary); LongSupplier translogInMemorySegmentCount = ((InternalEngine) primary.getEngine()).translogInMemorySegmentsCount::get; long translogInMemorySegmentCountExpected = 0; - indexDoc(primary, "test", "0", docToIndex); + Engine.IndexResult res = indexDoc(primary, "test", "0", docToIndex); + assertTrue(res.isCreated()); assertTrue(primary.getEngine().refreshNeeded()); GetResult testGet = 
primary.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME)); diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java index 15f6d0ed377fa..8d2255df9e7e8 100644 --- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -628,14 +628,17 @@ void overLimitTriggered(boolean leader) { })).toList(); threads.forEach(Thread::start); - safeAwait(barrier); int iterationCount = randomIntBetween(1, 5); + int lastIterationTriggerCount = leaderTriggerCount.get(); + + safeAwait(barrier); for (int i = 0; i < iterationCount; ++i) { memoryUsage.set(randomLongBetween(0, 100)); safeAwait(countDown.get()); assertThat(leaderTriggerCount.get(), lessThanOrEqualTo(i + 1)); - assertThat(leaderTriggerCount.get(), greaterThanOrEqualTo(i / 2 + 1)); + assertThat(leaderTriggerCount.get(), greaterThanOrEqualTo(lastIterationTriggerCount)); + lastIterationTriggerCount = leaderTriggerCount.get(); time.addAndGet(randomLongBetween(interval, interval * 2)); countDown.set(new CountDownLatch(randomIntBetween(1, 20))); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index bfb62e6fed197..057f253f0e50e 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -1252,7 +1252,7 @@ public void testCreateSearchContext() throws IOException { nowInMillis, clusterAlias ); - try (DefaultSearchContext searchContext = service.createSearchContext(request, new TimeValue(System.currentTimeMillis()))) { + try (SearchContext 
searchContext = service.createSearchContext(request, new TimeValue(System.currentTimeMillis()))) { SearchShardTarget searchShardTarget = searchContext.shardTarget(); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); String expectedIndexName = clusterAlias == null ? index : clusterAlias + ":" + index; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java index 70378268dde30..b5927d71bd782 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java @@ -247,7 +247,7 @@ public void testNonFinalReduceTopLevelPipelineAggs() { ); List aggs = singletonList(InternalAggregations.from(Collections.singletonList(terms))); InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(aggs, maxBucketReduceContext().forPartialReduction()); - assertEquals(1, reducedAggs.aggregations.size()); + assertEquals(1, reducedAggs.asList().size()); } public void testFinalReduceTopLevelPipelineAggs() { @@ -268,7 +268,7 @@ public void testFinalReduceTopLevelPipelineAggs() { InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(terms)); InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(List.of(aggs), maxBucketReduceContext().forFinalReduction()); - assertEquals(2, reducedAggs.aggregations.size()); + assertEquals(2, reducedAggs.asList().size()); } private AggregationReduceContext.Builder maxBucketReduceContext() { @@ -317,7 +317,7 @@ private void writeToAndReadFrom(InternalAggregations aggregations, TransportVers try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(serializedAggs.bytes), registry)) { in.setTransportVersion(version); InternalAggregations deserialized = InternalAggregations.readFrom(in); - 
assertEquals(aggregations.aggregations, deserialized.aggregations); + assertEquals(aggregations.asList(), deserialized.asList()); if (iteration < 2) { writeToAndReadFrom(deserialized, version, iteration + 1); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index ad7a6c47ef5e4..2d240f74b91a4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -43,9 +43,9 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; @@ -178,7 +178,7 @@ public void testEmbeddedMaxAgg() throws IOException { assertThat(bucket.getKey(), equalTo(1L)); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_max")); assertThat(((Max) (children.asList().get(0))).value(), equalTo(1.0)); @@ -192,7 +192,7 @@ public void testEmbeddedMaxAgg() throws IOException { assertThat(bucket.getKey(), equalTo("1")); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations 
children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_max")); assertThat(((Max) (children.asList().get(0))).value(), equalTo(1.0)); @@ -292,7 +292,7 @@ public void testNestedTerms() throws IOException { assertThat(bucket.getKey(), equalTo(1L)); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_terms")); assertThat(((Terms) (children.asList().get(0))).getBuckets().size(), equalTo(1)); @@ -308,7 +308,7 @@ public void testNestedTerms() throws IOException { assertThat(bucket.getKey(), equalTo("1")); assertThat(bucket.getDocCount(), equalTo(1L)); - Aggregations children = bucket.getAggregations(); + InternalAggregations children = bucket.getAggregations(); assertThat(children.asList().size(), equalTo(1)); assertThat(children.asList().get(0).getName(), equalTo("the_terms")); assertThat(((Terms) (children.asList().get(0))).getBuckets().size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index 05fcb45c71ee9..8e6d9b5788c54 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -25,10 +25,9 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; 
import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; @@ -119,12 +118,12 @@ public void testSameAggNames() throws IOException { // Finally, reduce the pipeline agg PipelineAggregator avgBucketAgg = avgBucketBuilder.createInternal(Collections.emptyMap()); - List reducedAggs = new ArrayList<>(2); + List reducedAggs = new ArrayList<>(2); // Histo has to go first to exercise the bug reducedAggs.add(histogramResult); reducedAggs.add(avgResult); - Aggregations aggregations = new Aggregations(reducedAggs); + InternalAggregations aggregations = InternalAggregations.from(reducedAggs); InternalAggregation pipelineResult = ((AvgBucketPipelineAggregator) avgBucketAgg).doReduce(aggregations, null); assertNotNull(pipelineResult); } @@ -174,10 +173,10 @@ public void testComplicatedBucketPath() throws IOException { // Finally, reduce the pipeline agg PipelineAggregator avgBucketAgg = avgBucketBuilder.createInternal(Collections.emptyMap()); - List reducedAggs = new ArrayList<>(4); + List reducedAggs = new ArrayList<>(4); reducedAggs.add(filterResult); - Aggregations aggregations = new Aggregations(reducedAggs); + InternalAggregations aggregations = InternalAggregations.from(reducedAggs); InternalAggregation pipelineResult = ((AvgBucketPipelineAggregator) avgBucketAgg).doReduce(aggregations, null); assertNotNull(pipelineResult); } diff --git a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java index 516ffeb9418bd..949f4b9e0677b 100644 --- 
a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregationsTests; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; @@ -115,8 +115,8 @@ public void testSerialization() throws Exception { assertEquals(querySearchResult.hasAggs(), deserialized.hasAggs()); if (deserialized.hasAggs()) { assertThat(deserialized.aggregations().isSerialized(), is(delayed)); - Aggregations aggs = querySearchResult.consumeAggs(); - Aggregations deserializedAggs = deserialized.consumeAggs(); + InternalAggregations aggs = querySearchResult.consumeAggs(); + InternalAggregations deserializedAggs = deserialized.consumeAggs(); assertEquals(aggs.asList(), deserializedAggs.asList()); assertThat(deserialized.aggregations(), is(nullValue())); } diff --git a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 0b84f14c56ecb..93dd7bc618756 100644 --- a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -9,7 +9,7 @@ package org.elasticsearch.test.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.search.aggregations.Aggregation; +import 
org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -55,7 +55,7 @@ private static void checkSignificantTermsAggregationCorrect(ESIntegTestCase test StringTerms classes = response.getAggregations().get("class"); Assert.assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); + Map aggs = classBucket.getAggregations().asMap(); Assert.assertTrue(aggs.containsKey("sig_terms")); SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); Assert.assertThat(agg.getBuckets().size(), equalTo(1)); diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java index 70ce86a1d91a6..9980c0a25a5dd 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java +++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java @@ -46,8 +46,8 @@ public class MetricsApmIT extends ESRestTestCase { .module("test-apm-integration") .module("apm") .setting("telemetry.metrics.enabled", "true") - .setting("tracing.apm.agent.metrics_interval", "1s") - .setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockApmServer.getPort()) + .setting("telemetry.agent.metrics_interval", "1s") + .setting("telemetry.agent.server_url", "http://127.0.0.1:" + mockApmServer.getPort()) .build(); @Override diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java 
b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java index 79816114cc38f..93ed525b38b59 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java +++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java @@ -51,9 +51,9 @@ public class TracesApmIT extends ESRestTestCase { .module("test-apm-integration") .module("apm") .setting("telemetry.metrics.enabled", "false") - .setting("tracing.apm.enabled", "true") - .setting("tracing.apm.agent.metrics_interval", "1s") - .setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockApmServer.getPort()) + .setting("telemetry.tracing.enabled", "true") + .setting("telemetry.agent.metrics_interval", "1s") + .setting("telemetry.agent.server_url", "http://127.0.0.1:" + mockApmServer.getPort()) .build(); @Override diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java index c1c4d70e0b906..aa1889e15d594 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -41,6 +42,7 @@ public static class TestPlugin extends Plugin {} private static final Map ACTIVE_SEARCH_CONTEXTS = new ConcurrentHashMap<>(); private Consumer onPutContext = context -> {}; + private Consumer onRemoveContext = context -> {}; private Consumer 
onCreateSearchContext = context -> {}; @@ -110,6 +112,7 @@ protected void putReaderContext(ReaderContext context) { protected ReaderContext removeReaderContext(long id) { final ReaderContext removed = super.removeReaderContext(id); if (removed != null) { + onRemoveContext.accept(removed); removeActiveContext(removed); } return removed; @@ -119,6 +122,10 @@ public void setOnPutContext(Consumer onPutContext) { this.onPutContext = onPutContext; } + public void setOnRemoveContext(Consumer onRemoveContext) { + this.onRemoveContext = onRemoveContext; + } + public void setOnCreateSearchContext(Consumer onCreateSearchContext) { this.onCreateSearchContext = onCreateSearchContext; } @@ -141,6 +148,14 @@ protected SearchContext createContext( return searchContext; } + @Override + public SearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) throws IOException { + SearchContext searchContext = super.createSearchContext(request, timeout); + onPutContext.accept(searchContext.readerContext()); + searchContext.addReleasable(() -> onRemoveContext.accept(searchContext.readerContext())); + return searchContext; + } + public void setOnCheckCancelled(Function onCheckCancelled) { this.onCheckCancelled = onCheckCancelled; } diff --git a/x-pack/libs/es-opensaml-security-api/build.gradle b/x-pack/libs/es-opensaml-security-api/build.gradle index 95064f6730133..416be7a785dd5 100644 --- a/x-pack/libs/es-opensaml-security-api/build.gradle +++ b/x-pack/libs/es-opensaml-security-api/build.gradle @@ -7,6 +7,7 @@ */ apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.publish' apply plugin: 'com.github.johnrengelman.shadow' dependencies { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java index 
c100d57dfb3d1..e71cedf381886 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java @@ -59,7 +59,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe } List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); aggs.add(new InternalSimpleLongValue(name(), cardinality, formatter, metadata())); Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java index 3dc364b1ec131..663299df54f8b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java @@ -101,11 +101,7 @@ private void reduceTDigest( } if (state != null) { - List aggs = bucket.getAggregations() - .asList() - .stream() - .map((p) -> (InternalAggregation) p) - .collect(Collectors.toList()); + List aggs = bucket.getAggregations().asList().stream().collect(Collectors.toList()); aggs.add(new InternalTDigestPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); } @@ -151,11 +147,7 @@ private void reduceHDR( } if (state != null) { - List aggs = 
bucket.getAggregations() - .asList() - .stream() - .map((p) -> (InternalAggregation) p) - .collect(Collectors.toList()); + List aggs = new ArrayList<>(bucket.getAggregations().asList()); aggs.add(new InternalHDRPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java index edbd750cdcc52..adb8b691a83ea 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java @@ -71,7 +71,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe } List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); aggs.add(new InternalSimpleValue(name(), normalizedBucketValue, formatter, metadata())); InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), bucket); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index 1fc477927d7b7..2ddba3446d79a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.inference.action; import 
org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -60,6 +61,8 @@ public static Request parseRequest(String inferenceEntityId, String taskType, XC Request.Builder builder = PARSER.apply(parser, null); builder.setInferenceEntityId(inferenceEntityId); builder.setTaskType(taskType); + // For rest requests we won't know what the input type is + builder.setInputType(InputType.UNSPECIFIED); return builder.build(); } @@ -96,7 +99,7 @@ public Request(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { this.inputType = in.readEnum(InputType.class); } else { - this.inputType = InputType.INGEST; + this.inputType = InputType.UNSPECIFIED; } } @@ -146,11 +149,22 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(input.get(0)); } out.writeGenericMap(taskSettings); + // in version ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED the input type enum was added, so we only want to write the enum if we're + // at that version or later if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { - out.writeEnum(inputType); + out.writeEnum(getInputTypeToWrite(out.getTransportVersion())); } } + private InputType getInputTypeToWrite(TransportVersion version) { + // in version ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED the UNSPECIFIED value was added, so if we're before that + // version other nodes won't know about it, so set it to INGEST instead + if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED) && inputType == InputType.UNSPECIFIED) { + return InputType.INGEST; + } + return inputType; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -173,6 +187,7 @@ public static class Builder 
{ private TaskType taskType; private String inferenceEntityId; private List input; + private InputType inputType = InputType.UNSPECIFIED; private Map taskSettings = Map.of(); private Builder() {} @@ -197,13 +212,18 @@ public Builder setInput(List input) { return this; } + public Builder setInputType(InputType inputType) { + this.inputType = inputType; + return this; + } + public Builder setTaskSettings(Map taskSettings) { this.taskSettings = taskSettings; return this; } public Request build() { - return new Request(taskType, inferenceEntityId, input, taskSettings, InputType.INGEST); + return new Request(taskType, inferenceEntityId, input, taskSettings, inputType); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java index bc24ca129635e..a31e83d8246fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.xcontent.ToXContentObject; @@ -45,7 +45,7 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { * Processes given aggregations as a step towards computing result * @param aggs aggregations from {@link SearchResponse} */ - void process(Aggregations aggs); + void process(InternalAggregations aggs); /** * Gets the evaluation result for this metric. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java index 346996a742cf1..0a1778a6a6f30 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java @@ -14,7 +14,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -124,7 +124,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (overallAccuracy.get() == null && aggs.get(OVERALL_ACCURACY_AGG_NAME) instanceof NumericMetricsAggregation.SingleValue) { NumericMetricsAggregation.SingleValue overallAccuracyAgg = aggs.get(OVERALL_ACCURACY_AGG_NAME); overallAccuracy.set(overallAccuracyAgg.value()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java index f7e80e7fcf972..5bdd85e34a7c7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AucRoc.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.nested.Nested; @@ -175,7 +175,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (result.get() != null) { return; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java index 5279f026722af..e385e9d9d78d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrix.java @@ -16,8 +16,8 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filters; import 
org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter; @@ -183,7 +183,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (topActualClassNames.get() == null && aggs.get(aggName(STEP_1_AGGREGATE_BY_ACTUAL_CLASS)) != null) { Terms termsAgg = aggs.get(aggName(STEP_1_AGGREGATE_BY_ACTUAL_CLASS)); topActualClassNames.set(termsAgg.getBuckets().stream().map(Terms.Bucket::getKeyAsString).sorted().collect(Collectors.toList())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java index 5b9cffd48f284..6936164ceb07e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java @@ -16,8 +16,8 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.bucket.filter.Filters; @@ -140,7 +140,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { final Aggregation classNamesAgg = aggs.get(ACTUAL_CLASSES_NAMES_AGG_NAME); if (topActualClassNames.get() == null && classNamesAgg instanceof Terms 
topActualClassesAgg) { if (topActualClassesAgg.getSumOfOtherDocCounts() > 0) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java index 646af7848cf23..6aaabc13c86c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java @@ -15,8 +15,8 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -119,7 +119,7 @@ public final Tuple, List> a } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { final Aggregation byClass = aggs.get(BY_ACTUAL_CLASS_AGG_NAME); final Aggregation avgRecall = aggs.get(AVG_RECALL_AGG_NAME); if (result.get() == null diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java index 99d7853ddab3a..83b6fe58498e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -92,7 +92,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { result = evaluate(aggs); } @@ -103,7 +103,7 @@ public Optional getResult() { protected abstract List aggsAt(String actualField, String predictedProbabilityField); - protected abstract EvaluationMetricResult evaluate(Aggregations aggs); + protected abstract EvaluationMetricResult evaluate(InternalAggregations aggs); enum Condition { TP(true, true), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java index e15148b5fd7e1..c06edb66b301a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/AucRoc.java @@ -14,7 +14,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; 
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -155,7 +155,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (result.get() != null) { return; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java index bf13b882f3e98..f902274fdc7f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrix.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -87,7 +87,7 @@ protected List aggsAt(String actualField, String predictedPr } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public EvaluationMetricResult evaluate(InternalAggregations aggs) { long[] tp = new long[thresholds.length]; long[] fp = new long[thresholds.length]; long[] tn = new long[thresholds.length]; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java index fcbf1c6216239..d2364faaf7859 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Precision.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -83,7 +83,7 @@ protected List aggsAt(String actualField, String predictedPr } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public EvaluationMetricResult evaluate(InternalAggregations aggs) { double[] precisions = new double[thresholds.length]; for (int i = 0; i < thresholds.length; i++) { double threshold = thresholds[i]; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java index 07f0cdbb6c17a..8291bcdac30c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/Recall.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; 
import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -83,7 +83,7 @@ protected List aggsAt(String actualField, String predictedPr } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public EvaluationMetricResult evaluate(InternalAggregations aggs) { double[] recalls = new double[thresholds.length]; for (int i = 0; i < thresholds.length; i++) { double threshold = thresholds[i]; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java index 28802148220b6..4e8ba57ffbc95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Huber.java @@ -14,7 +14,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -118,7 +118,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME); result = value == null ? 
new Result(0.0) : new Result(value.value()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java index 2a50383494abe..d43ff3e5390b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java @@ -13,7 +13,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ObjectParser; @@ -97,7 +97,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME); result = value == null ? 
new Result(0.0) : new Result(value.value()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java index 9ca3e39d53c4b..00afd2acff200 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicError.java @@ -14,7 +14,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -113,7 +113,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME); result = value == null ? 
new Result(0.0) : new Result(value.value()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java index fa41661771f62..2e1251abecda1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java @@ -13,7 +13,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; @@ -100,7 +100,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { NumericMetricsAggregation.SingleValue residualSumOfSquares = aggs.get(SS_RES); ExtendedStats extendedStats = aggs.get(ExtendedStatsAggregationBuilder.NAME + "_actual"); // extendedStats.getVariance() is the statistical sumOfSquares divided by count diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 4637ca7edd8dd..dd2baca058102 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -333,7 +333,7 @@ public class ClusterPrivilegeResolver { public static final NamedClusterPrivilege WRITE_CONNECTOR_SECRETS = new ActionClusterPrivilege( "write_connector_secrets", - Set.of("cluster:admin/xpack/connector/secret/post") + Set.of("cluster:admin/xpack/connector/secret/post", "cluster:admin/xpack/connector/secret/delete") ); private static final Map VALUES = sortByAccessLevel( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java index 519cd06204dab..368823d0f64af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/MockAggregations.java @@ -7,13 +7,12 @@ package org.elasticsearch.xpack.core.ml.dataframe.evaluation; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.filter.Filters; +import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.InternalCardinality; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import java.util.Collections; @@ -25,7 +24,7 @@ public final class MockAggregations { - public static 
Terms mockTerms(String name) { + public static StringTerms mockTerms(String name) { return mockTerms(name, Collections.emptyList(), 0); } @@ -44,7 +43,7 @@ public static StringTerms.Bucket mockTermsBucket(String key, InternalAggregation return bucket; } - public static Filters mockFilters(String name) { + public static InternalFilters mockFilters(String name) { return mockFilters(name, Collections.emptyList()); } @@ -68,8 +67,8 @@ public static InternalFilters.InternalBucket mockFiltersBucket(String key, long return bucket; } - public static Filter mockFilter(String name, long docCount) { - Filter agg = mock(Filter.class); + public static InternalFilter mockFilter(String name, long docCount) { + InternalFilter agg = mock(InternalFilter.class); when(agg.getName()).thenReturn(name); when(agg.getDocCount()).thenReturn(docCount); return agg; @@ -89,8 +88,8 @@ public static InternalCardinality mockCardinality(String name, long value) { return agg; } - public static ExtendedStats mockExtendedStats(String name, double variance, long count) { - ExtendedStats agg = mock(ExtendedStats.class); + public static InternalExtendedStats mockExtendedStats(String name, double variance, long count) { + InternalExtendedStats agg = mock(InternalExtendedStats.class); when(agg.getName()).thenReturn(name); when(agg.getVariance()).thenReturn(variance); when(agg.getCount()).thenReturn(count); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java index 50277084cba1e..3bf1ff171e422 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java @@ -8,7 +8,6 @@ import 
org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -129,7 +128,7 @@ public void testProcess() { } public void testProcess_GivenCardinalityTooHigh() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( List.of( mockTerms( "accuracy_" + MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java index cc101626667b2..b797961e58b33 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/ClassificationTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.AbstractXContentSerializingTestCase; @@ -65,7 +65,7 @@ protected NamedXContentRegistry xContentRegistry() { public static Classification createRandom() { List metrics = randomSubsetOf( Arrays.asList( - AccuracyTests.createRandom(), + // AccuracyTests.createRandom(), AucRocTests.createRandom(), 
PrecisionTests.createRandom(), RecallTests.createRandom(), @@ -341,7 +341,7 @@ public Tuple, List> aggs( } @Override - public void process(Aggregations aggs) { + public void process(InternalAggregations aggs) { if (result != null) { return; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java index 5ab62fd628199..e8e71b8721c26 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.test.AbstractXContentSerializingTestCase; @@ -102,7 +101,7 @@ public void testAggs() { } public void testProcess() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( List.of( mockTerms( MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, @@ -172,7 +171,7 @@ public void testProcess() { } public void testProcess_OtherClassesCountGreaterThanZero() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( List.of( mockTerms( MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS, @@ -257,7 +256,7 @@ public void testProcess_MoreThanTwoStepsNeeded() { mockCardinality(MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 2L) ) ); - 
Aggregations aggsStep2 = new Aggregations( + InternalAggregations aggsStep2 = InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_ACTUAL_CLASS, @@ -302,7 +301,7 @@ public void testProcess_MoreThanTwoStepsNeeded() { ) ) ); - Aggregations aggsStep3 = new Aggregations( + InternalAggregations aggsStep3 = InternalAggregations.from( List.of( mockFilters( MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_ACTUAL_CLASS, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java index d4261d81fea2c..f44efff28c034 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PrecisionTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationFields; @@ -63,7 +63,7 @@ public static Precision createRandom() { } public void testProcess() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockTerms(Precision.ACTUAL_CLASSES_NAMES_AGG_NAME), mockFilters(Precision.BY_PREDICTED_CLASS_AGG_NAME), @@ -81,7 +81,7 @@ public void testProcess() { public void testProcess_GivenMissingAgg() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( 
Arrays.asList(mockFilters(Precision.BY_PREDICTED_CLASS_AGG_NAME), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); Precision precision = new Precision(); @@ -89,7 +89,7 @@ public void testProcess_GivenMissingAgg() { assertThat(precision.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue(Precision.AVG_PRECISION_AGG_NAME, 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377) @@ -103,7 +103,7 @@ public void testProcess_GivenMissingAgg() { public void testProcess_GivenAggOfWrongType() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockFilters(Precision.BY_PREDICTED_CLASS_AGG_NAME), mockFilters(Precision.AVG_PRECISION_AGG_NAME)) ); Precision precision = new Precision(); @@ -111,7 +111,7 @@ public void testProcess_GivenAggOfWrongType() { assertThat(precision.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue(Precision.BY_PREDICTED_CLASS_AGG_NAME, 1.0), mockSingleValue(Precision.AVG_PRECISION_AGG_NAME, 0.8123) @@ -124,7 +124,7 @@ public void testProcess_GivenAggOfWrongType() { } public void testProcess_GivenCardinalityTooHigh() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Collections.singletonList(mockTerms(Precision.ACTUAL_CLASSES_NAMES_AGG_NAME, Collections.emptyList(), 1)) ); Precision precision = new Precision(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java index 5f446083612df..8ba6e48082b71 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/RecallTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationFields; @@ -62,7 +62,7 @@ public static Recall createRandom() { } public void testProcess() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME), mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123), @@ -79,7 +79,7 @@ public void testProcess() { public void testProcess_GivenMissingAgg() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); Recall recall = new Recall(); @@ -87,7 +87,7 @@ public void testProcess_GivenMissingAgg() { assertThat(recall.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); Recall recall = new Recall(); @@ -98,7 +98,7 @@ public void testProcess_GivenMissingAgg() { public void testProcess_GivenAggOfWrongType() { { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME), mockTerms(Recall.AVG_RECALL_AGG_NAME)) ); 
Recall recall = new Recall(); @@ -106,7 +106,7 @@ public void testProcess_GivenAggOfWrongType() { assertThat(recall.getResult(), isEmpty()); } { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue(Recall.BY_ACTUAL_CLASS_AGG_NAME, 1.0), mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123)) ); Recall recall = new Recall(); @@ -116,7 +116,7 @@ public void testProcess_GivenAggOfWrongType() { } public void testProcess_GivenCardinalityTooHigh() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockTerms(Recall.BY_ACTUAL_CLASS_AGG_NAME, Collections.emptyList(), 1), mockSingleValue(Recall.AVG_RECALL_AGG_NAME, 0.8123) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java index acbd647f7bfa2..1557bd71f98b5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -53,7 +53,7 @@ public static ConfusionMatrix createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( 
mockFilter("confusion_matrix_at_0.25_TP", 1L), mockFilter("confusion_matrix_at_0.25_FP", 2L), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java index 299aa76f05fde..bc198eaf3c7db 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/PrecisionTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -53,7 +53,7 @@ public static Precision createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockFilter("precision_at_0.25_TP", 1L), mockFilter("precision_at_0.25_FP", 4L), @@ -73,7 +73,9 @@ public void testEvaluate() { } public void testEvaluate_GivenZeroTpAndFp() { - Aggregations aggs = new Aggregations(Arrays.asList(mockFilter("precision_at_1.0_TP", 0L), mockFilter("precision_at_1.0_FP", 0L))); + InternalAggregations aggs = InternalAggregations.from( + Arrays.asList(mockFilter("precision_at_1.0_TP", 0L), mockFilter("precision_at_1.0_FP", 0L)) + ); Precision precision = new Precision(Arrays.asList(1.0)); EvaluationMetricResult result = precision.evaluate(aggs); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java index fb4ab46675eca..569b73417414e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/RecallTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -53,7 +53,7 @@ public static Recall createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockFilter("recall_at_0.25_TP", 1L), mockFilter("recall_at_0.25_FN", 4L), @@ -73,7 +73,9 @@ public void testEvaluate() { } public void testEvaluate_GivenZeroTpAndFp() { - Aggregations aggs = new Aggregations(Arrays.asList(mockFilter("recall_at_1.0_TP", 0L), mockFilter("recall_at_1.0_FN", 0L))); + InternalAggregations aggs = InternalAggregations.from( + Arrays.asList(mockFilter("recall_at_1.0_TP", 0L), mockFilter("recall_at_1.0_FN", 0L)) + ); Recall recall = new Recall(Arrays.asList(1.0)); EvaluationMetricResult result = recall.evaluate(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java index 
8e7f4ddd36253..4a8485e8d138f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/HuberTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -47,7 +47,7 @@ public static Huber createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("regression_huber", 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); @@ -60,7 +60,9 @@ public void testEvaluate() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377)) + ); Huber huber = new Huber((Double) null); huber.process(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java index c6c0d00dd240f..551a5f017c120 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -47,7 +47,7 @@ public static MeanSquaredError createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("regression_mse", 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); @@ -60,7 +60,9 @@ public void testEvaluate() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377)) + ); MeanSquaredError mse = new MeanSquaredError(); mse.process(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java index beb39e46fa5f1..d2bb30fb169b1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -47,7 +47,7 @@ public static MeanSquaredLogarithmicError createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("regression_msle", 0.8123), mockSingleValue("some_other_single_metric_agg", 0.2377)) ); @@ -60,7 +60,9 @@ public void testEvaluate() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377)) + ); MeanSquaredLogarithmicError msle = new MeanSquaredLogarithmicError((Double) null); msle.process(aggs); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java index 644979379703c..710810d2d168e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquaredTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; @@ -48,7 +48,7 @@ public static RSquared createRandom() { } public void testEvaluate() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue("residual_sum_of_squares", 10_111), mockExtendedStats("extended_stats_actual", 155.23, 1000), @@ -66,7 +66,7 @@ public void testEvaluate() { } public void testEvaluateWithZeroCount() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue("residual_sum_of_squares", 0), mockExtendedStats("extended_stats_actual", 0.0, 0), @@ -83,7 +83,7 @@ public void testEvaluateWithZeroCount() { } public void testEvaluateWithSingleCountZeroVariance() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList( mockSingleValue("residual_sum_of_squares", 1), mockExtendedStats("extended_stats_actual", 0.0, 1), @@ -100,7 +100,9 @@ public void testEvaluateWithSingleCountZeroVariance() { } public void testEvaluate_GivenMissingAggs() { - Aggregations aggs = new Aggregations(Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))); + InternalAggregations aggs = InternalAggregations.from( + (Collections.singletonList(mockSingleValue("some_other_single_metric_agg", 0.2377))) + ); RSquared rSquared = new RSquared(); rSquared.process(aggs); @@ -110,7 +112,7 @@ public void testEvaluate_GivenMissingAggs() { } public void testEvaluate_GivenMissingExtendedStatsAgg() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("some_other_single_metric_agg", 0.2377), mockSingleValue("residual_sum_of_squares", 0.2377)) ); @@ -122,7 +124,7 @@ public void 
testEvaluate_GivenMissingExtendedStatsAgg() { } public void testEvaluate_GivenMissingResidualSumOfSquaresAgg() { - Aggregations aggs = new Aggregations( + InternalAggregations aggs = InternalAggregations.from( Arrays.asList(mockSingleValue("some_other_single_metric_agg", 0.2377), mockExtendedStats("extended_stats_actual", 100, 50)) ); diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 95de6e3ab2027..28eb9ae66a4e0 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -60,9 +60,9 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchResponseUtils; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; @@ -1059,7 +1059,7 @@ private RolloverResponse rollover(String dataStreamName) throws ExecutionExcepti return response; } - private Aggregations aggregate(final String index, AggregationBuilder aggregationBuilder) { + private InternalAggregations aggregate(final String index, AggregationBuilder aggregationBuilder) { var resp = client().prepareSearch(index).addAggregation(aggregationBuilder).get(); try { 
return resp.getAggregations(); @@ -1138,8 +1138,8 @@ private void assertDownsampleIndexAggregations( Map labelFields ) { final AggregationBuilder aggregations = buildAggregations(config, metricFields, labelFields, config.getTimestampField()); - Aggregations origResp = aggregate(sourceIndex, aggregations); - Aggregations downsampleResp = aggregate(downsampleIndex, aggregations); + InternalAggregations origResp = aggregate(sourceIndex, aggregations); + InternalAggregations downsampleResp = aggregate(downsampleIndex, aggregations); assertEquals(origResp.asMap().keySet(), downsampleResp.asMap().keySet()); StringTerms originalTsIdTermsAggregation = (StringTerms) origResp.getAsMap().values().stream().toList().get(0); @@ -1164,25 +1164,25 @@ private void assertDownsampleIndexAggregations( InternalDateHistogram.Bucket downsampleDateHistogramBucket = downsampleDateHistogramBuckets.get(i); assertEquals(originalDateHistogramBucket.getKeyAsString(), downsampleDateHistogramBucket.getKeyAsString()); - Aggregations originalAggregations = originalDateHistogramBucket.getAggregations(); - Aggregations downsampleAggregations = downsampleDateHistogramBucket.getAggregations(); + InternalAggregations originalAggregations = originalDateHistogramBucket.getAggregations(); + InternalAggregations downsampleAggregations = downsampleDateHistogramBucket.getAggregations(); assertEquals(originalAggregations.asList().size(), downsampleAggregations.asList().size()); - List nonTopHitsOriginalAggregations = originalAggregations.asList() + List nonTopHitsOriginalAggregations = originalAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits") == false) .toList(); - List nonTopHitsDownsampleAggregations = downsampleAggregations.asList() + List nonTopHitsDownsampleAggregations = downsampleAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits") == false) .toList(); assertEquals(nonTopHitsOriginalAggregations, nonTopHitsDownsampleAggregations); - List 
topHitsOriginalAggregations = originalAggregations.asList() + List topHitsOriginalAggregations = originalAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits")) .toList(); - List topHitsDownsampleAggregations = downsampleAggregations.asList() + List topHitsDownsampleAggregations = downsampleAggregations.asList() .stream() .filter(agg -> agg.getType().equals("top_hits")) .toList(); @@ -1224,7 +1224,7 @@ private void assertDownsampleIndexAggregations( ); Object originalLabelValue = originalHit.getDocumentFields().values().stream().toList().get(0).getValue(); Object downsampleLabelValue = downsampleHit.getDocumentFields().values().stream().toList().get(0).getValue(); - Optional labelAsMetric = nonTopHitsOriginalAggregations.stream() + Optional labelAsMetric = nonTopHitsOriginalAggregations.stream() .filter(agg -> agg.getName().equals("metric_" + downsampleTopHits.getName())) .findFirst(); // NOTE: this check is possible only if the label can be indexed as a metric (the label is a numeric field) diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml index df4a640a0495d..5a7ab14dc6386 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -57,7 +57,7 @@ setup: connector_id: test-connector body: configuration: - some_field: + some_new_field: default_value: null depends_on: - field: some_field @@ -92,20 +92,22 @@ setup: connector.get: connector_id: test-connector - - match: { configuration.some_field.value: 456 } + - is_false: configuration.some_field # configuration.some_field doesn't exist + + - match: { 
configuration.some_new_field.value: 456 } - match: { status: configured } - - match: { configuration.some_field.validations.0.constraint: [123, 456, 789] } - - match: { configuration.some_field.validations.0.type: included_in } - - match: { configuration.some_field.validations.1.constraint: ["string 1", "string 2", "string 3"] } - - match: { configuration.some_field.validations.1.type: included_in } - - match: { configuration.some_field.validations.2.constraint: 0 } - - match: { configuration.some_field.validations.2.type: greater_than } - - match: { configuration.some_field.validations.3.constraint: 42 } - - match: { configuration.some_field.validations.3.type: less_than } - - match: { configuration.some_field.validations.4.constraint: int } - - match: { configuration.some_field.validations.4.type: list_type } - - match: { configuration.some_field.validations.5.constraint: "\\d+" } - - match: { configuration.some_field.validations.5.type: regex } + - match: { configuration.some_new_field.validations.0.constraint: [123, 456, 789] } + - match: { configuration.some_new_field.validations.0.type: included_in } + - match: { configuration.some_new_field.validations.1.constraint: ["string 1", "string 2", "string 3"] } + - match: { configuration.some_new_field.validations.1.type: included_in } + - match: { configuration.some_new_field.validations.2.constraint: 0 } + - match: { configuration.some_new_field.validations.2.type: greater_than } + - match: { configuration.some_new_field.validations.3.constraint: 42 } + - match: { configuration.some_new_field.validations.3.type: less_than } + - match: { configuration.some_new_field.validations.4.constraint: int } + - match: { configuration.some_new_field.validations.4.type: list_type } + - match: { configuration.some_new_field.validations.5.constraint: "\\d+" } + - match: { configuration.some_new_field.validations.5.type: regex } --- "Update Connector Configuration with null tooltip": diff --git 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml index 4b2d3777ffe9d..8fd676bb977b6 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml @@ -53,7 +53,7 @@ setup: catch: unauthorized --- -'Get connector secret - Missing secret id': +'Get connector secret - Secret does not exist': - do: connector_secret.get: id: non-existing-secret-id diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml new file mode 100644 index 0000000000000..ed50fc55a81e0 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml @@ -0,0 +1,71 @@ +setup: + - skip: + version: " - 8.12.99" + reason: Introduced in 8.13.0 + +--- +'Delete connector secret - admin': + - do: + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + + - do: + connector_secret.delete: + id: $id + - match: { deleted: true } + + - do: + connector_secret.get: + id: $id + catch: missing + +--- +'Delete connector secret - user with privileges': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.delete: + id: $id + - match: 
{ deleted: true } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.get: + id: $id + catch: missing + +--- +'Delete connector secret - user without privileges': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.post: + body: + value: my-secret + - set: { id: id } + - match: { id: $id } + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged + connector_secret.delete: + id: $id + catch: unauthorized + +--- +'Delete connector secret - Secret does not exist': + - do: + connector_secret.delete: + id: non-existing-secret-id + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index d344bd60a22bd..3933e7923d6b9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -90,10 +90,13 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorServiceTypeAction; import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsFeature; import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsIndexService; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.RestDeleteConnectorSecretAction; import 
org.elasticsearch.xpack.application.connector.secrets.action.RestGetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.RestPostConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.TransportDeleteConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportGetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportPostConnectorSecretAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; @@ -271,6 +274,7 @@ protected XPackLicenseState getLicenseState() { if (ConnectorSecretsFeature.isEnabled()) { actionHandlers.addAll( List.of( + new ActionHandler<>(DeleteConnectorSecretAction.INSTANCE, TransportDeleteConnectorSecretAction.class), new ActionHandler<>(GetConnectorSecretAction.INSTANCE, TransportGetConnectorSecretAction.class), new ActionHandler<>(PostConnectorSecretAction.INSTANCE, TransportPostConnectorSecretAction.class) ) @@ -355,7 +359,9 @@ public List getRestHandlers( } if (ConnectorSecretsFeature.isEnabled()) { - restHandlers.addAll(List.of(new RestGetConnectorSecretAction(), new RestPostConnectorSecretAction())); + restHandlers.addAll( + List.of(new RestGetConnectorSecretAction(), new RestPostConnectorSecretAction(), new RestDeleteConnectorSecretAction()) + ); } return Collections.unmodifiableList(restHandlers); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index fdbf27929789f..b7ddf560247ed 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -356,34 +356,38 @@ public static Connector fromXContent(XContentParser parser, 
String docId) throws return PARSER.parse(parser, docId); } + public void toInnerXContent(XContentBuilder builder, Params params) throws IOException { + // The "id": connectorId is included in GET and LIST responses to provide the connector's docID. + // Note: This ID is not written to the Elasticsearch index; it's only for API response purposes. + if (connectorId != null) { + builder.field(ID_FIELD.getPreferredName(), connectorId); + } + builder.field(API_KEY_ID_FIELD.getPreferredName(), apiKeyId); + builder.xContentValuesMap(CONFIGURATION_FIELD.getPreferredName(), configuration); + builder.xContentValuesMap(CUSTOM_SCHEDULING_FIELD.getPreferredName(), customScheduling); + builder.field(DESCRIPTION_FIELD.getPreferredName(), description); + builder.field(ERROR_FIELD.getPreferredName(), error); + builder.field(FEATURES_FIELD.getPreferredName(), features); + builder.xContentList(FILTERING_FIELD.getPreferredName(), filtering); + builder.field(INDEX_NAME_FIELD.getPreferredName(), indexName); + builder.field(IS_NATIVE_FIELD.getPreferredName(), isNative); + builder.field(LANGUAGE_FIELD.getPreferredName(), language); + builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + syncInfo.toXContent(builder, params); + builder.field(NAME_FIELD.getPreferredName(), name); + builder.field(PIPELINE_FIELD.getPreferredName(), pipeline); + builder.field(SCHEDULING_FIELD.getPreferredName(), scheduling); + builder.field(SERVICE_TYPE_FIELD.getPreferredName(), serviceType); + builder.field(SYNC_CURSOR_FIELD.getPreferredName(), syncCursor); + builder.field(STATUS_FIELD.getPreferredName(), status.toString()); + builder.field(SYNC_NOW_FIELD.getPreferredName(), syncNow); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - // The "id": connectorId is included in GET and LIST responses to provide the connector's docID. 
- // Note: This ID is not written to the Elasticsearch index; it's only for API response purposes. - if (connectorId != null) { - builder.field(ID_FIELD.getPreferredName(), connectorId); - } - builder.field(API_KEY_ID_FIELD.getPreferredName(), apiKeyId); - builder.xContentValuesMap(CONFIGURATION_FIELD.getPreferredName(), configuration); - builder.xContentValuesMap(CUSTOM_SCHEDULING_FIELD.getPreferredName(), customScheduling); - builder.field(DESCRIPTION_FIELD.getPreferredName(), description); - builder.field(ERROR_FIELD.getPreferredName(), error); - builder.field(FEATURES_FIELD.getPreferredName(), features); - builder.xContentList(FILTERING_FIELD.getPreferredName(), filtering); - builder.field(INDEX_NAME_FIELD.getPreferredName(), indexName); - builder.field(IS_NATIVE_FIELD.getPreferredName(), isNative); - builder.field(LANGUAGE_FIELD.getPreferredName(), language); - builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); - syncInfo.toXContent(builder, params); - builder.field(NAME_FIELD.getPreferredName(), name); - builder.field(PIPELINE_FIELD.getPreferredName(), pipeline); - builder.field(SCHEDULING_FIELD.getPreferredName(), scheduling); - builder.field(SERVICE_TYPE_FIELD.getPreferredName(), serviceType); - builder.field(SYNC_CURSOR_FIELD.getPreferredName(), syncCursor); - builder.field(STATUS_FIELD.getPreferredName(), status.toString()); - builder.field(SYNC_NOW_FIELD.getPreferredName(), syncNow); + toInnerXContent(builder, params); } builder.endObject(); return builder; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index d92074dacc129..cf6c3190a37b4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -26,11 +26,12 @@ import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; @@ -47,6 +48,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; @@ -175,7 +177,7 @@ private Connector createConnectorWithDefaultValues( * @param connectorId The id of the connector object. * @param listener The action listener to invoke on response/failure. 
*/ - public void getConnector(String connectorId, ActionListener listener) { + public void getConnector(String connectorId, ActionListener listener) { try { final GetRequest getRequest = new GetRequest(CONNECTOR_INDEX_NAME).id(connectorId).realtime(true); @@ -185,11 +187,11 @@ public void getConnector(String connectorId, ActionListener listener) return; } try { - final Connector connector = Connector.fromXContentBytes( - getResponse.getSourceAsBytesRef(), - connectorId, - XContentType.JSON - ); + final ConnectorSearchResult connector = new ConnectorSearchResult.Builder().setId(connectorId) + .setResultBytes(getResponse.getSourceAsBytesRef()) + .setResultMap(getResponse.getSourceAsMap()) + .build(); + l.onResponse(connector); } catch (Exception e) { listener.onFailure(e); @@ -269,6 +271,8 @@ public void onFailure(Exception e) { /** * Updates the {@link ConnectorConfiguration} property of a {@link Connector}. + * The update process is non-additive; it completely replaces all existing configuration fields with the new configuration mapping, + * thereby deleting any old configurations. * * @param request Request for updating connector configuration property. * @param listener Listener to respond to a successful response or an error. 
@@ -276,19 +280,32 @@ public void onFailure(Exception e) { public void updateConnectorConfiguration(UpdateConnectorConfigurationAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); - final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( - new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(connectorId) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source( - Map.of( - Connector.CONFIGURATION_FIELD.getPreferredName(), - request.getConfiguration(), - Connector.STATUS_FIELD.getPreferredName(), - ConnectorStatus.CONFIGURED.toString() - ) - ) + + String updateConfigurationScript = String.format( + Locale.ROOT, + """ + ctx._source.%s = params.%s; + ctx._source.%s = params.%s; + """, + Connector.CONFIGURATION_FIELD.getPreferredName(), + Connector.CONFIGURATION_FIELD.getPreferredName(), + Connector.STATUS_FIELD.getPreferredName(), + Connector.STATUS_FIELD.getPreferredName() + ); + Script script = new Script( + ScriptType.INLINE, + "painless", + updateConfigurationScript, + Map.of( + Connector.CONFIGURATION_FIELD.getPreferredName(), + request.getConfiguration(), + Connector.STATUS_FIELD.getPreferredName(), + ConnectorStatus.CONFIGURED.toString() + ) ); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).script(script) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + clientWithOrigin.update( updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { @@ -567,7 +584,9 @@ public void updateConnectorServiceType(UpdateConnectorServiceTypeAction.Request String connectorId = request.getConnectorId(); getConnector(connectorId, listener.delegateFailure((l, connector) -> { - ConnectorStatus prevStatus = connector.getStatus(); + ConnectorStatus prevStatus = ConnectorStatus.connectorStatus( + (String) 
connector.getResultMap().get(Connector.STATUS_FIELD.getPreferredName()) + ); ConnectorStatus newStatus = prevStatus == ConnectorStatus.CREATED ? ConnectorStatus.CREATED : ConnectorStatus.NEEDS_CONFIGURATION; @@ -603,20 +622,23 @@ public void updateConnectorServiceType(UpdateConnectorServiceTypeAction.Request } private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { - final List connectorResults = Arrays.stream(response.getHits().getHits()) + final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) .toList(); return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value); } - private static Connector hitToConnector(SearchHit searchHit) { + private static ConnectorSearchResult hitToConnector(SearchHit searchHit) { // todo: don't return sensitive data from configuration in list endpoint - return Connector.fromXContentBytes(searchHit.getSourceRef(), searchHit.getId(), XContentType.JSON); + return new ConnectorSearchResult.Builder().setId(searchHit.getId()) + .setResultBytes(searchHit.getSourceRef()) + .setResultMap(searchHit.getSourceAsMap()) + .build(); } - public record ConnectorResult(List connectors, long totalResults) {} + public record ConnectorResult(List connectors, long totalResults) {} /** * Listeners that checks failures for IndexNotFoundException, and transforms them in ResourceNotFoundException, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSearchResult.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSearchResult.java new file mode 100644 index 0000000000000..d054542e0865a --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSearchResult.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; +import java.util.Map; + +public class ConnectorSearchResult extends ConnectorsAPISearchResult { + + public ConnectorSearchResult(StreamInput in) throws IOException { + super(in); + } + + private ConnectorSearchResult(BytesReference resultBytes, Map resultMap, String id) { + super(resultBytes, resultMap, id); + } + + public static class Builder { + + private BytesReference resultBytes; + private Map resultMap; + private String id; + + public Builder setResultBytes(BytesReference resultBytes) { + this.resultBytes = resultBytes; + return this; + } + + public Builder setResultMap(Map resultMap) { + this.resultMap = resultMap; + return this; + } + + public Builder setId(String id) { + this.id = id; + return this; + } + + public ConnectorSearchResult build() { + return new ConnectorSearchResult(resultBytes, resultMap, id); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorsAPISearchResult.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorsAPISearchResult.java new file mode 100644 index 0000000000000..a00e3748565d8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorsAPISearchResult.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +/** + * Represents the outcome of a search query in the connectors and sync job index, encapsulating the search result. + * It includes a raw byte reference to the result which can be deserialized into a {@link Connector} or {@link ConnectorSyncJob} object, + * and a result map for returning the data without strict deserialization. + */ +public class ConnectorsAPISearchResult implements Writeable, ToXContentObject { + + private final BytesReference resultBytes; + private final Map resultMap; + private final String docId; + + protected ConnectorsAPISearchResult(BytesReference resultBytes, Map resultMap, String id) { + this.resultBytes = resultBytes; + this.resultMap = resultMap; + this.docId = id; + } + + public ConnectorsAPISearchResult(StreamInput in) throws IOException { + this.resultBytes = in.readBytesReference(); + this.resultMap = in.readGenericMap(); + this.docId = in.readString(); + } + + public BytesReference getSourceRef() { + return resultBytes; + } + + public Map getResultMap() { + return resultMap; + } + + public String getDocId() { + return docId; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("id", docId); + builder.mapContents(resultMap); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
out.writeBytesReference(resultBytes); + out.writeGenericMap(resultMap); + out.writeString(docId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConnectorsAPISearchResult that = (ConnectorsAPISearchResult) o; + return Objects.equals(resultBytes, that.resultBytes) + && Objects.equals(resultMap, that.resultMap) + && Objects.equals(docId, that.docId); + } + + @Override + public int hashCode() { + return Objects.hash(resultBytes, resultMap, docId); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java index 88eacc8f437b4..a9792458f1963 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorSearchResult; import java.io.IOException; import java.util.Objects; @@ -110,15 +110,15 @@ public static Request parse(XContentParser parser) { public static class Response extends ActionResponse implements ToXContentObject { - private final Connector connector; + private final ConnectorSearchResult connector; - public Response(Connector connector) { + public Response(ConnectorSearchResult connector) { this.connector = connector; } public Response(StreamInput in) throws IOException { super(in); - this.connector = new Connector(in); + this.connector = new ConnectorSearchResult(in); } @Override @@ -131,10 
+131,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return connector.toXContent(builder, params); } - public static GetConnectorAction.Response fromXContent(XContentParser parser, String docId) throws IOException { - return new GetConnectorAction.Response(Connector.fromXContent(parser, docId)); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java index 3b286569ce881..b4a3a2c0d3632 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorSearchResult; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.action.util.QueryPage; @@ -105,14 +105,14 @@ public static class Response extends ActionResponse implements ToXContentObject public static final ParseField RESULT_FIELD = new ParseField("results"); - final QueryPage queryPage; + final QueryPage queryPage; public Response(StreamInput in) throws IOException { super(in); - this.queryPage = new QueryPage<>(in, Connector::new); + this.queryPage = new QueryPage<>(in, ConnectorSearchResult::new); } - public Response(List items, Long totalResults) { + public Response(List items, Long totalResults) { this.queryPage = new QueryPage<>(items, totalResults, RESULT_FIELD); } @@ -126,7 +126,7 @@ 
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return queryPage.toXContent(builder, params); } - public QueryPage queryPage() { + public QueryPage queryPage() { return queryPage; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java index 633909ac2aa89..c994fc1155277 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java @@ -10,11 +10,13 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; @@ -93,4 +95,19 @@ public void createSecret(PostConnectorSecretRequest request, ActionListener listener) { + try { + clientWithOrigin.prepareDelete(CONNECTOR_SECRETS_INDEX_NAME, id) + .execute(listener.delegateFailureAndWrap((delegate, deleteResponse) -> { + if (deleteResponse.getResult() == 
DocWriteResponse.Result.NOT_FOUND) { + delegate.onFailure(new ResourceNotFoundException("No secret with id [" + id + "]")); + return; + } + delegate.onResponse(new DeleteConnectorSecretResponse(deleteResponse.getResult() == DocWriteResponse.Result.DELETED)); + })); + } catch (Exception e) { + listener.onFailure(e); + } + } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretAction.java new file mode 100644 index 0000000000000..b97911a350972 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretAction.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionType; + +public class DeleteConnectorSecretAction { + + public static final String NAME = "cluster:admin/xpack/connector/secret/delete"; + + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private DeleteConnectorSecretAction() {/* no instances */} +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequest.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequest.java new file mode 100644 index 0000000000000..183362f64ea8f --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequest.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class DeleteConnectorSecretRequest extends ActionRequest { + + private final String id; + + public DeleteConnectorSecretRequest(String id) { + this.id = Objects.requireNonNull(id); + } + + public DeleteConnectorSecretRequest(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + } + + public String id() { + return id; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(id)) { + validationException = addValidationError("id missing", validationException); + } + + return validationException; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteConnectorSecretRequest that = (DeleteConnectorSecretRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponse.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponse.java new file mode 100644 index 0000000000000..7568d3f193779 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponse.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteConnectorSecretResponse extends ActionResponse implements ToXContentObject { + + private final boolean deleted; + + public DeleteConnectorSecretResponse(boolean deleted) { + this.deleted = deleted; + } + + public DeleteConnectorSecretResponse(StreamInput in) throws IOException { + super(in); + this.deleted = in.readBoolean(); + } + + public boolean isDeleted() { + return deleted; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(deleted); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("deleted", deleted); + return builder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteConnectorSecretResponse that = (DeleteConnectorSecretResponse) o; + return deleted == that.deleted; + } + + @Override + public int hashCode() { + return Objects.hash(deleted); + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestDeleteConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestDeleteConnectorSecretAction.java new file mode 100644 index 0000000000000..cd1c9b5f19498 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestDeleteConnectorSecretAction.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +@ServerlessScope(Scope.INTERNAL) +public class RestDeleteConnectorSecretAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_delete_secret"; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.DELETE, "/_connector/_secret/{id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + final String id = request.param("id"); + return restChannel -> client.execute( + DeleteConnectorSecretAction.INSTANCE, + new DeleteConnectorSecretRequest(id), + new RestToXContentListener<>(restChannel) + ); + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretAction.java new file mode 100644 index 0000000000000..7c87598440cfd --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsIndexService; + +public class TransportDeleteConnectorSecretAction extends HandledTransportAction< + DeleteConnectorSecretRequest, + DeleteConnectorSecretResponse> { + + private final ConnectorSecretsIndexService connectorSecretsIndexService; + + @Inject + public TransportDeleteConnectorSecretAction(TransportService transportService, ActionFilters actionFilters, Client client) { + super( + DeleteConnectorSecretAction.NAME, + transportService, + actionFilters, + DeleteConnectorSecretRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSecretsIndexService = new ConnectorSecretsIndexService(client); + } + + protected 
void doExecute(Task task, DeleteConnectorSecretRequest request, ActionListener listener) { + connectorSecretsIndexService.deleteSecret(request.id(), listener); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 48f3f2a117d63..fb34035e5400b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -200,7 +200,7 @@ private ConnectorSyncJob( this.createdAt = createdAt; this.deletedDocumentCount = deletedDocumentCount; this.error = error; - this.id = Objects.requireNonNull(id, "[id] cannot be null"); + this.id = id; this.indexedDocumentCount = indexedDocumentCount; this.indexedDocumentVolume = indexedDocumentVolume; this.jobType = Objects.requireNonNullElse(jobType, ConnectorSyncJobType.FULL); @@ -235,10 +235,10 @@ public ConnectorSyncJob(StreamInput in) throws IOException { } @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "connector_sync_job", true, - (args) -> { + (args, docId) -> { int i = 0; return new Builder().setCancellationRequestedAt((Instant) args[i++]) .setCanceledAt((Instant) args[i++]) @@ -247,7 +247,7 @@ public ConnectorSyncJob(StreamInput in) throws IOException { .setCreatedAt((Instant) args[i++]) .setDeletedDocumentCount((Long) args[i++]) .setError((String) args[i++]) - .setId((String) args[i++]) + .setId(docId) .setIndexedDocumentCount((Long) args[i++]) .setIndexedDocumentVolume((Long) args[i++]) .setJobType((ConnectorSyncJobType) args[i++]) @@ -295,7 +295,6 @@ public ConnectorSyncJob(StreamInput in) 
throws IOException { ); PARSER.declareLong(constructorArg(), DELETED_DOCUMENT_COUNT_FIELD); PARSER.declareStringOrNull(optionalConstructorArg(), ERROR_FIELD); - PARSER.declareString(constructorArg(), ID_FIELD); PARSER.declareLong(constructorArg(), INDEXED_DOCUMENT_COUNT_FIELD); PARSER.declareLong(constructorArg(), INDEXED_DOCUMENT_VOLUME_FIELD); PARSER.declareField( @@ -383,16 +382,16 @@ public ConnectorSyncJob(StreamInput in) throws IOException { ); } - public static ConnectorSyncJob fromXContentBytes(BytesReference source, XContentType xContentType) { + public static ConnectorSyncJob fromXContentBytes(BytesReference source, String docId, XContentType xContentType) { try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { - return ConnectorSyncJob.fromXContent(parser); + return ConnectorSyncJob.fromXContent(parser, docId); } catch (IOException e) { throw new ElasticsearchParseException("Failed to parse a connector sync job document.", e); } } - public static ConnectorSyncJob fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); + public static ConnectorSyncJob fromXContent(XContentParser parser, String docId) throws IOException { + return PARSER.parse(parser, docId); } public static Connector syncJobConnectorFromXContentBytes(BytesReference source, String connectorId, XContentType xContentType) { @@ -479,70 +478,73 @@ public String getWorkerHostname() { return workerHostname; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); + public void toInnerXContent(XContentBuilder builder, Params params) throws IOException { + if (cancelationRequestedAt != null) { + builder.field(CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), cancelationRequestedAt); + } + if (canceledAt != null) { + builder.field(CANCELED_AT_FIELD.getPreferredName(), canceledAt); + } + if (completedAt != null) { + 
builder.field(COMPLETED_AT_FIELD.getPreferredName(), completedAt); + } + + builder.startObject(CONNECTOR_FIELD.getPreferredName()); { - if (cancelationRequestedAt != null) { - builder.field(CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), cancelationRequestedAt); - } - if (canceledAt != null) { - builder.field(CANCELED_AT_FIELD.getPreferredName(), canceledAt); + if (connector.getConnectorId() != null) { + builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId()); } - if (completedAt != null) { - builder.field(COMPLETED_AT_FIELD.getPreferredName(), completedAt); + if (connector.getSyncJobFiltering() != null) { + builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getSyncJobFiltering()); } - - builder.startObject(CONNECTOR_FIELD.getPreferredName()); - { - if (connector.getConnectorId() != null) { - builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId()); - } - if (connector.getSyncJobFiltering() != null) { - builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getSyncJobFiltering()); - } - if (connector.getIndexName() != null) { - builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); - } - if (connector.getLanguage() != null) { - builder.field(Connector.LANGUAGE_FIELD.getPreferredName(), connector.getLanguage()); - } - if (connector.getPipeline() != null) { - builder.field(Connector.PIPELINE_FIELD.getPreferredName(), connector.getPipeline()); - } - if (connector.getServiceType() != null) { - builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connector.getServiceType()); - } - if (connector.getConfiguration() != null) { - builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), connector.getConfiguration()); - } + if (connector.getIndexName() != null) { + builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); } - builder.endObject(); - - builder.field(CREATED_AT_FIELD.getPreferredName(), createdAt); 
- builder.field(DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), deletedDocumentCount); - if (error != null) { - builder.field(ERROR_FIELD.getPreferredName(), error); + if (connector.getLanguage() != null) { + builder.field(Connector.LANGUAGE_FIELD.getPreferredName(), connector.getLanguage()); } - builder.field(ID_FIELD.getPreferredName(), id); - builder.field(INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), indexedDocumentCount); - builder.field(INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), indexedDocumentVolume); - builder.field(JOB_TYPE_FIELD.getPreferredName(), jobType); - if (lastSeen != null) { - builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + if (connector.getPipeline() != null) { + builder.field(Connector.PIPELINE_FIELD.getPreferredName(), connector.getPipeline()); } - builder.field(METADATA_FIELD.getPreferredName(), metadata); - if (startedAt != null) { - builder.field(STARTED_AT_FIELD.getPreferredName(), startedAt); + if (connector.getServiceType() != null) { + builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connector.getServiceType()); } - builder.field(STATUS_FIELD.getPreferredName(), status); - builder.field(TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), totalDocumentCount); - builder.field(TRIGGER_METHOD_FIELD.getPreferredName(), triggerMethod); - if (workerHostname != null) { - builder.field(WORKER_HOSTNAME_FIELD.getPreferredName(), workerHostname); + if (connector.getConfiguration() != null) { + builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), connector.getConfiguration()); } } builder.endObject(); + + builder.field(CREATED_AT_FIELD.getPreferredName(), createdAt); + builder.field(DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), deletedDocumentCount); + if (error != null) { + builder.field(ERROR_FIELD.getPreferredName(), error); + } + builder.field(INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), indexedDocumentCount); + builder.field(INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), 
indexedDocumentVolume); + builder.field(JOB_TYPE_FIELD.getPreferredName(), jobType); + if (lastSeen != null) { + builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + } + builder.field(METADATA_FIELD.getPreferredName(), metadata); + if (startedAt != null) { + builder.field(STARTED_AT_FIELD.getPreferredName(), startedAt); + } + builder.field(STATUS_FIELD.getPreferredName(), status); + builder.field(TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), totalDocumentCount); + builder.field(TRIGGER_METHOD_FIELD.getPreferredName(), triggerMethod); + if (workerHostname != null) { + builder.field(WORKER_HOSTNAME_FIELD.getPreferredName(), workerHostname); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + toInnerXContent(builder, params); + } + builder.endObject(); return builder; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 01a297a11103b..b6d20b9f0e777 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -97,14 +97,11 @@ public void createConnectorSyncJob( ); try { - String syncJobId = generateId(); - final IndexRequest indexRequest = new IndexRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).id(syncJobId) - .opType(DocWriteRequest.OpType.INDEX) + final IndexRequest indexRequest = new IndexRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - ConnectorSyncJob syncJob = new ConnectorSyncJob.Builder().setId(syncJobId) - .setJobType(jobType) + ConnectorSyncJob 
syncJob = new ConnectorSyncJob.Builder().setJobType(jobType) .setTriggerMethod(triggerMethod) .setStatus(ConnectorSyncJob.DEFAULT_INITIAL_STATUS) .setConnector(connector) @@ -195,7 +192,7 @@ public void checkInConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { + public void getConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { final GetRequest getRequest = new GetRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).id(connectorSyncJobId).realtime(true); try { @@ -208,11 +205,10 @@ public void getConnectorSyncJob(String connectorSyncJobId, ActionListener connectorSyncJobs = Arrays.stream(searchResponse.getHits().getHits()) + final List connectorSyncJobs = Arrays.stream(searchResponse.getHits().getHits()) .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) .toList(); @@ -346,13 +342,17 @@ private ConnectorSyncJobsResult mapSearchResponseToConnectorSyncJobsList(SearchR ); } - private static ConnectorSyncJob hitToConnectorSyncJob(SearchHit searchHit) { + private static ConnectorSyncJobSearchResult hitToConnectorSyncJob(SearchHit searchHit) { // TODO: don't return sensitive data from configuration inside connector in list endpoint - return ConnectorSyncJob.fromXContentBytes(searchHit.getSourceRef(), XContentType.JSON); + return new ConnectorSyncJobSearchResult.Builder().setId(searchHit.getId()) + .setResultBytes(searchHit.getSourceRef()) + .setResultMap(searchHit.getSourceAsMap()) + .build(); + } - public record ConnectorSyncJobsResult(List connectorSyncJobs, long totalResults) {} + public record ConnectorSyncJobsResult(List connectorSyncJobs, long totalResults) {} /** * Updates the ingestion stats of the {@link ConnectorSyncJob} in the underlying index. 
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobSearchResult.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobSearchResult.java new file mode 100644 index 0000000000000..7ab2719dcbea2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobSearchResult.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.application.connector.ConnectorsAPISearchResult; + +import java.io.IOException; +import java.util.Map; + +public class ConnectorSyncJobSearchResult extends ConnectorsAPISearchResult { + + public ConnectorSyncJobSearchResult(StreamInput in) throws IOException { + super(in); + } + + private ConnectorSyncJobSearchResult(BytesReference resultBytes, Map resultMap, String id) { + super(resultBytes, resultMap, id); + } + + public static class Builder { + + private BytesReference resultBytes; + private Map resultMap; + private String id; + + public Builder setResultBytes(BytesReference resultBytes) { + this.resultBytes = resultBytes; + return this; + } + + public Builder setResultMap(Map resultMap) { + this.resultMap = resultMap; + return this; + } + + public Builder setId(String id) { + this.id = id; + return this; + } + + public ConnectorSyncJobSearchResult build() { + return new ConnectorSyncJobSearchResult(resultBytes, resultMap, id); + } + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java index 9e21ba7e94f1f..31441883f061c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java @@ -19,8 +19,8 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobSearchResult; import java.io.IOException; import java.util.Objects; @@ -110,15 +110,15 @@ public static Request parse(XContentParser parser) { } public static class Response extends ActionResponse implements ToXContentObject { - private final ConnectorSyncJob connectorSyncJob; + private final ConnectorSyncJobSearchResult connectorSyncJob; - public Response(ConnectorSyncJob connectorSyncJob) { + public Response(ConnectorSyncJobSearchResult connectorSyncJob) { this.connectorSyncJob = connectorSyncJob; } public Response(StreamInput in) throws IOException { super(in); - this.connectorSyncJob = new ConnectorSyncJob(in); + this.connectorSyncJob = new ConnectorSyncJobSearchResult(in); } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java index 
298eee466bfb2..c81df8b642b37 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobSearchResult; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.action.util.QueryPage; @@ -133,14 +134,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static class Response extends ActionResponse implements ToXContentObject { public static final ParseField RESULTS_FIELD = new ParseField("results"); - final QueryPage queryPage; + final QueryPage queryPage; public Response(StreamInput in) throws IOException { super(in); - this.queryPage = new QueryPage<>(in, ConnectorSyncJob::new); + this.queryPage = new QueryPage<>(in, ConnectorSyncJobSearchResult::new); } - public Response(List items, Long totalResults) { + public Response(List items, Long totalResults) { this.queryPage = new QueryPage<>(items, totalResults, RESULTS_FIELD); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 542ea948c12df..c043bfd4453d8 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -12,8 +12,16 @@ 
import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.MockScriptPlugin; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.UpdateScript; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; @@ -29,11 +37,14 @@ import org.junit.Before; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -51,6 +62,13 @@ public void setup() { this.connectorIndexService = new ConnectorIndexService(client()); } + @Override + protected Collection> getPlugins() { + List> plugins = new ArrayList<>(super.getPlugins()); + plugins.add(MockPainlessScriptEngine.TestPlugin.class); + return plugins; + } + public void testPutConnector() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); @@ -92,21 +110,16 @@ public void testUpdateConnectorConfiguration() throws Exception { DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), 
equalTo(RestStatus.OK))); - Map connectorConfiguration = connector.getConfiguration() - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> ConnectorTestUtils.getRandomConnectorConfigurationField())); - UpdateConnectorConfigurationAction.Request updateConfigurationRequest = new UpdateConnectorConfigurationAction.Request( connectorId, - connectorConfiguration + connector.getConfiguration() ); DocWriteResponse updateResponse = awaitUpdateConnectorConfiguration(updateConfigurationRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connectorId); - assertThat(connectorConfiguration, equalTo(indexedConnector.getConfiguration())); - assertThat(indexedConnector.getStatus(), equalTo(ConnectorStatus.CONFIGURED)); + + // Configuration update is handled via painless script. ScriptEngine is mocked for unit tests. + // More comprehensive tests are defined in yamlRestTest. } public void testUpdateConnectorPipeline() throws Exception { @@ -401,7 +414,13 @@ private Connector awaitGetConnector(String connectorId) throws Exception { final AtomicReference exc = new AtomicReference<>(null); connectorIndexService.getConnector(connectorId, new ActionListener<>() { @Override - public void onResponse(Connector connector) { + public void onResponse(ConnectorSearchResult connectorResult) { + // Serialize the sourceRef to Connector class for unit tests + Connector connector = Connector.fromXContentBytes( + connectorResult.getSourceRef(), + connectorResult.getDocId(), + XContentType.JSON + ); resp.set(connector); latch.countDown(); } @@ -700,4 +719,44 @@ public void onFailure(Exception e) { return resp.get(); } + /** + * Update configuration action is handled via painless script. This implementation mocks the painless script engine + * for unit tests. 
+ */ + private static class MockPainlessScriptEngine extends MockScriptEngine { + + public static final String NAME = "painless"; + + public static class TestPlugin extends MockScriptPlugin { + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new ConnectorIndexServiceTests.MockPainlessScriptEngine(); + } + + @Override + protected Map, Object>> pluginScripts() { + return Collections.emptyMap(); + } + } + + @Override + public String getType() { + return NAME; + } + + @Override + public T compile(String name, String script, ScriptContext context, Map options) { + if (context.instanceClazz.equals(UpdateScript.class)) { + UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(params, ctx) { + @Override + public void execute() { + + } + }; + return context.factoryClazz.cast(factory); + } + throw new IllegalArgumentException("mock painless does not know how to handle context [" + context.name + "]"); + } + } + } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 74b84e914a942..ecfcfcf9e4af4 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDependency; @@ -24,6 +27,7 @@ import 
org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; import org.elasticsearch.xpack.core.scheduler.Cron; +import java.io.IOException; import java.time.Instant; import java.util.Collections; import java.util.HashMap; @@ -262,6 +266,30 @@ public static Connector getRandomConnector() { .build(); } + private static BytesReference convertConnectorToBytesReference(Connector connector) { + try { + return XContentHelper.toXContent((builder, params) -> { + connector.toInnerXContent(builder, params); + return builder; + }, XContentType.JSON, null, false); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static Map convertConnectorToGenericMap(Connector connector) { + return XContentHelper.convertToMap(convertConnectorToBytesReference(connector), true, XContentType.JSON).v2(); + } + + public static ConnectorSearchResult getRandomConnectorSearchResult() { + Connector connector = getRandomConnector(); + + return new ConnectorSearchResult.Builder().setResultBytes(convertConnectorToBytesReference(connector)) + .setResultMap(convertConnectorToGenericMap(connector)) + .setId(randomAlphaOfLength(10)) + .build(); + } + private static ConnectorFeatures.FeatureEnabled randomConnectorFeatureEnabled() { return new ConnectorFeatures.FeatureEnabled(randomBoolean()); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java index 168e9ec8f433e..cc47e9d35afb0 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java @@ -9,16 +9,12 @@ import 
org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; -import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import java.io.IOException; -public class GetConnectorActionResponseBWCSerializingTests extends AbstractBWCSerializationTestCase { - - private Connector connector; +public class GetConnectorActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { @Override protected Writeable.Reader instanceReader() { @@ -27,8 +23,7 @@ protected Writeable.Reader instanceReader() { @Override protected GetConnectorAction.Response createTestInstance() { - this.connector = ConnectorTestUtils.getRandomConnector(); - return new GetConnectorAction.Response(this.connector); + return new GetConnectorAction.Response(ConnectorTestUtils.getRandomConnectorSearchResult()); } @Override @@ -36,11 +31,6 @@ protected GetConnectorAction.Response mutateInstance(GetConnectorAction.Response return randomValueOtherThan(instance, this::createTestInstance); } - @Override - protected GetConnectorAction.Response doParseInstance(XContentParser parser) throws IOException { - return GetConnectorAction.Response.fromXContent(parser, connector.getConnectorId()); - } - @Override protected GetConnectorAction.Response mutateInstanceForVersion(GetConnectorAction.Response instance, TransportVersion version) { return instance; diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java index 1e4ee0d086462..ac8c85def542e 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java @@ -22,7 +22,10 @@ protected Writeable.Reader instanceReader() { @Override protected ListConnectorAction.Response createTestInstance() { - return new ListConnectorAction.Response(randomList(10, ConnectorTestUtils::getRandomConnector), randomLongBetween(0, 100)); + return new ListConnectorAction.Response( + randomList(10, ConnectorTestUtils::getRandomConnectorSearchResult), + randomLongBetween(0, 100) + ); } @Override diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java index f9a548a47feb3..b93c83c6494f3 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.application.connector.secrets; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; @@ -42,6 +44,18 @@ public void 
testCreateAndGetConnectorSecret() throws Exception { assertThat(gotSecret.value(), notNullValue()); } + public void testDeleteConnectorSecret() throws Exception { + PostConnectorSecretRequest createSecretRequest = ConnectorSecretsTestUtils.getRandomPostConnectorSecretRequest(); + PostConnectorSecretResponse createdSecret = awaitPostConnectorSecret(createSecretRequest); + + String secretIdToDelete = createdSecret.id(); + DeleteConnectorSecretResponse resp = awaitDeleteConnectorSecret(secretIdToDelete); + assertThat(resp.isDeleted(), equalTo(true)); + + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnectorSecret(secretIdToDelete)); + expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnectorSecret(secretIdToDelete)); + } + private PostConnectorSecretResponse awaitPostConnectorSecret(PostConnectorSecretRequest secretRequest) throws Exception { CountDownLatch latch = new CountDownLatch(1); @@ -101,4 +115,31 @@ public void onFailure(Exception e) { assertNotNull("Received null response from get request", resp.get()); return resp.get(); } + + private DeleteConnectorSecretResponse awaitDeleteConnectorSecret(String connectorSecretId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + + connectorSecretsIndexService.deleteSecret(connectorSecretId, new ActionListener() { + @Override + public void onResponse(DeleteConnectorSecretResponse response) { + resp.set(response); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for delete request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from delete request", resp.get()); + return resp.get(); + } } diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java index 5928ed4a1e5cd..13051505f9c4d 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.application.connector.secrets; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; @@ -14,6 +16,7 @@ import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomBoolean; public class ConnectorSecretsTestUtils { @@ -34,4 +37,12 @@ public static PostConnectorSecretRequest getRandomPostConnectorSecretRequest() { public static PostConnectorSecretResponse getRandomPostConnectorSecretResponse() { return new PostConnectorSecretResponse(randomAlphaOfLength(10)); } + + public static DeleteConnectorSecretRequest getRandomDeleteConnectorSecretRequest() { + return new DeleteConnectorSecretRequest(randomAlphaOfLengthBetween(1, 20)); + } + + public static DeleteConnectorSecretResponse getRandomDeleteConnectorSecretResponse() { + return new DeleteConnectorSecretResponse(randomBoolean()); + } } diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretActionTests.java new file mode 100644 index 0000000000000..5d9127527fc3a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretActionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class DeleteConnectorSecretActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSecretIdIsPresent_ExpectNoValidationError() { + DeleteConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomDeleteConnectorSecretRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSecretIdIsEmpty_ExpectValidationError() { + DeleteConnectorSecretRequest requestWithMissingConnectorId = new DeleteConnectorSecretRequest(""); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("id missing")); + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..bdbdb1982173e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretRequestBWCSerializingTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class DeleteConnectorSecretRequestBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return DeleteConnectorSecretRequest::new; + } + + @Override + protected DeleteConnectorSecretRequest createTestInstance() { + return new DeleteConnectorSecretRequest(randomAlphaOfLengthBetween(1, 10)); + } + + @Override + protected DeleteConnectorSecretRequest mutateInstance(DeleteConnectorSecretRequest instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected DeleteConnectorSecretRequest mutateInstanceForVersion(DeleteConnectorSecretRequest instance, TransportVersion version) { + return new DeleteConnectorSecretRequest(instance.id()); + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..964c5e15d845d --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/DeleteConnectorSecretResponseBWCSerializingTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.List; + +public class DeleteConnectorSecretResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + public NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new))); + } + + @Override + protected Writeable.Reader instanceReader() { + return DeleteConnectorSecretResponse::new; + } + + @Override + protected DeleteConnectorSecretResponse createTestInstance() { + return ConnectorSecretsTestUtils.getRandomDeleteConnectorSecretResponse(); + } + + @Override + protected DeleteConnectorSecretResponse 
mutateInstance(DeleteConnectorSecretResponse instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected DeleteConnectorSecretResponse mutateInstanceForVersion(DeleteConnectorSecretResponse instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretActionTests.java new file mode 100644 index 0000000000000..165cc560ada1a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportDeleteConnectorSecretActionTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportDeleteConnectorSecretActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportDeleteConnectorSecretAction action; + + @Before + public void setup() { + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportDeleteConnectorSecretAction(transportService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testDeleteConnectorSecret_ExpectNoWarnings() throws InterruptedException { + DeleteConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomDeleteConnectorSecretRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(DeleteConnectorSecretRequest request) throws InterruptedException { + final CountDownLatch latch = new 
CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for delete request", requestTimedOut); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 170ed25c0b302..2bbcf6c74b6fd 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; @@ -100,7 +101,6 @@ public void testCreateConnectorSyncJob() throws Exception { ConnectorSyncJob connectorSyncJob = awaitGetConnectorSyncJob(response.getId()); - assertThat(connectorSyncJob.getId(), notNullValue()); assertThat(connectorSyncJob.getJobType(), equalTo(requestJobType)); assertThat(connectorSyncJob.getTriggerMethod(), equalTo(requestTriggerMethod)); assertThat(connectorSyncJob.getStatus(), equalTo(ConnectorSyncJob.DEFAULT_INITIAL_STATUS)); @@ -283,11 +283,31 @@ public void testListConnectorSyncJobs() throws Exception { ConnectorSyncJobIndexService.ConnectorSyncJobsResult nextTwoSyncJobs = awaitListConnectorSyncJobs(2, 2, null, null); 
ConnectorSyncJobIndexService.ConnectorSyncJobsResult lastSyncJobs = awaitListConnectorSyncJobs(4, 100, null, null); - ConnectorSyncJob firstSyncJob = firstTwoSyncJobs.connectorSyncJobs().get(0); - ConnectorSyncJob secondSyncJob = firstTwoSyncJobs.connectorSyncJobs().get(1); - ConnectorSyncJob thirdSyncJob = nextTwoSyncJobs.connectorSyncJobs().get(0); - ConnectorSyncJob fourthSyncJob = nextTwoSyncJobs.connectorSyncJobs().get(1); - ConnectorSyncJob fifthSyncJob = lastSyncJobs.connectorSyncJobs().get(0); + ConnectorSyncJob firstSyncJob = ConnectorSyncJob.fromXContentBytes( + firstTwoSyncJobs.connectorSyncJobs().get(0).getSourceRef(), + firstTwoSyncJobs.connectorSyncJobs().get(0).getDocId(), + XContentType.JSON + ); + ConnectorSyncJob secondSyncJob = ConnectorSyncJob.fromXContentBytes( + firstTwoSyncJobs.connectorSyncJobs().get(1).getSourceRef(), + firstTwoSyncJobs.connectorSyncJobs().get(1).getDocId(), + XContentType.JSON + ); + ConnectorSyncJob thirdSyncJob = ConnectorSyncJob.fromXContentBytes( + nextTwoSyncJobs.connectorSyncJobs().get(0).getSourceRef(), + nextTwoSyncJobs.connectorSyncJobs().get(0).getDocId(), + XContentType.JSON + ); + ConnectorSyncJob fourthSyncJob = ConnectorSyncJob.fromXContentBytes( + nextTwoSyncJobs.connectorSyncJobs().get(1).getSourceRef(), + nextTwoSyncJobs.connectorSyncJobs().get(1).getDocId(), + XContentType.JSON + ); + ConnectorSyncJob fifthSyncJob = ConnectorSyncJob.fromXContentBytes( + lastSyncJobs.connectorSyncJobs().get(0).getSourceRef(), + lastSyncJobs.connectorSyncJobs().get(0).getDocId(), + XContentType.JSON + ); assertThat(firstTwoSyncJobs.connectorSyncJobs().size(), equalTo(2)); assertThat(firstTwoSyncJobs.totalResults(), equalTo(5L)); @@ -337,7 +357,7 @@ public void testListConnectorSyncJobs_WithStatusPending_GivenOnePendingTwoCancel ConnectorSyncStatus.PENDING ); long numberOfResults = connectorSyncJobsResult.totalResults(); - String idOfReturnedSyncJob = connectorSyncJobsResult.connectorSyncJobs().get(0).getId(); + String 
idOfReturnedSyncJob = connectorSyncJobsResult.connectorSyncJobs().get(0).getDocId(); assertThat(numberOfResults, equalTo(1L)); assertThat(idOfReturnedSyncJob, equalTo(syncJobOneId)); @@ -363,7 +383,11 @@ public void testListConnectorSyncJobs_WithConnectorOneId_GivenTwoOverallOneFromC ); long numberOfResults = connectorSyncJobsResult.totalResults(); - String connectorIdOfReturnedSyncJob = connectorSyncJobsResult.connectorSyncJobs().get(0).getConnector().getConnectorId(); + String connectorIdOfReturnedSyncJob = ConnectorSyncJob.fromXContentBytes( + connectorSyncJobsResult.connectorSyncJobs().get(0).getSourceRef(), + connectorSyncJobsResult.connectorSyncJobs().get(0).getDocId(), + XContentType.JSON + ).getConnector().getConnectorId(); assertThat(numberOfResults, equalTo(1L)); assertThat(connectorIdOfReturnedSyncJob, equalTo(connectorOneId)); @@ -699,9 +723,15 @@ private ConnectorSyncJob awaitGetConnectorSyncJob(String connectorSyncJobId) thr final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorSyncJobIndexService.getConnectorSyncJob(connectorSyncJobId, new ActionListener() { + connectorSyncJobIndexService.getConnectorSyncJob(connectorSyncJobId, new ActionListener() { @Override - public void onResponse(ConnectorSyncJob connectorSyncJob) { + public void onResponse(ConnectorSyncJobSearchResult searchResult) { + // Serialize the sourceRef to ConnectorSyncJob class for unit tests + ConnectorSyncJob connectorSyncJob = ConnectorSyncJob.fromXContentBytes( + searchResult.getSourceRef(), + searchResult.getDocId(), + XContentType.JSON + ); resp.set(connectorSyncJob); latch.countDown(); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 96a12c9efac51..c53231cd79219 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -7,7 +7,10 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Tuple; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; @@ -19,7 +22,9 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import java.io.IOException; import java.time.Instant; +import java.util.Map; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; @@ -65,6 +70,30 @@ public static ConnectorSyncJob getRandomConnectorSyncJob() { .build(); } + private static BytesReference convertSyncJobToBytesReference(ConnectorSyncJob syncJob) { + try { + return XContentHelper.toXContent((builder, params) -> { + syncJob.toInnerXContent(builder, params); + return builder; + }, XContentType.JSON, null, false); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static Map convertSyncJobToGenericMap(ConnectorSyncJob syncJob) { + return XContentHelper.convertToMap(convertSyncJobToBytesReference(syncJob), true, XContentType.JSON).v2(); + } + + public static ConnectorSyncJobSearchResult getRandomSyncJobSearchResult() { + ConnectorSyncJob syncJob = getRandomConnectorSyncJob(); + + return new 
ConnectorSyncJobSearchResult.Builder().setId(randomAlphaOfLength(10)) + .setResultMap(convertSyncJobToGenericMap(syncJob)) + .setResultBytes(convertSyncJobToBytesReference(syncJob)) + .build(); + } + public static ConnectorSyncJobTriggerMethod getRandomConnectorSyncJobTriggerMethod() { ConnectorSyncJobTriggerMethod[] values = ConnectorSyncJobTriggerMethod.values(); return values[randomInt(values.length - 1)]; @@ -146,7 +175,7 @@ public static GetConnectorSyncJobAction.Request getRandomGetConnectorSyncJobRequ } public static GetConnectorSyncJobAction.Response getRandomGetConnectorSyncJobResponse() { - return new GetConnectorSyncJobAction.Response(getRandomConnectorSyncJob()); + return new GetConnectorSyncJobAction.Response(getRandomSyncJobSearchResult()); } public static ListConnectorSyncJobsAction.Request getRandomListConnectorSyncJobsActionRequest() { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index 64f11923ce164..7b1a0f7d8dcf7 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -87,7 +87,6 @@ public void testFromXContent_WithAllFields_AllSet() throws IOException { "created_at": "2023-12-01T14:18:43.07693Z", "deleted_document_count": 10, "error": "some-error", - "id": "HIC-JYwB9RqKhB7x_hIE", "indexed_document_count": 10, "indexed_document_volume": 10, "job_type": "full", @@ -101,7 +100,7 @@ public void testFromXContent_WithAllFields_AllSet() throws IOException { } """); - ConnectorSyncJob syncJob = ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); + ConnectorSyncJob syncJob = ConnectorSyncJob.fromXContentBytes(new 
BytesArray(content), "HIC-JYwB9RqKhB7x_hIE", XContentType.JSON); assertThat(syncJob.getCancelationRequestedAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); assertThat(syncJob.getCanceledAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); @@ -170,7 +169,6 @@ public void testFromXContent_WithOnlyNonNullableFieldsSet_DoesNotThrow() throws }, "created_at": "2023-12-01T14:18:43.07693Z", "deleted_document_count": 10, - "id": "HIC-JYwB9RqKhB7x_hIE", "indexed_document_count": 10, "indexed_document_volume": 10, "job_type": "full", @@ -182,7 +180,7 @@ public void testFromXContent_WithOnlyNonNullableFieldsSet_DoesNotThrow() throws } """); - ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); + ConnectorSyncJob.fromXContentBytes(new BytesArray(content), "HIC-JYwB9RqKhB7x_hIE", XContentType.JSON); } public void testFromXContent_WithAllNullableFieldsSetToNull_DoesNotThrow() throws IOException { @@ -230,7 +228,6 @@ public void testFromXContent_WithAllNullableFieldsSetToNull_DoesNotThrow() throw "created_at": "2023-12-01T14:18:43.07693Z", "deleted_document_count": 10, "error": null, - "id": "HIC-JYwB9RqKhB7x_hIE", "indexed_document_count": 10, "indexed_document_volume": 10, "job_type": "full", @@ -244,7 +241,7 @@ public void testFromXContent_WithAllNullableFieldsSetToNull_DoesNotThrow() throw } """); - ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); + ConnectorSyncJob.fromXContentBytes(new BytesArray(content), "HIC-JYwB9RqKhB7x_hIE", XContentType.JSON); } public void testSyncJobConnectorFromXContent_WithAllFieldsSet() throws IOException { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java index 
48a358ad043cd..bc7b6320dddbe 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java @@ -33,7 +33,7 @@ protected Writeable.Reader instanceReader( @Override protected ListConnectorSyncJobsAction.Response createTestInstance() { return new ListConnectorSyncJobsAction.Response( - randomList(10, ConnectorSyncJobTestUtils::getRandomConnectorSyncJob), + randomList(10, ConnectorSyncJobTestUtils::getRandomSyncJobSearchResult), randomLongBetween(0, 100) ); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java index 011b0d09fd8c5..a2309c48578a3 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java @@ -18,7 +18,7 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; @@ -95,7 +95,7 @@ public static ActionListener multiSearchLogListener(ActionL } private static void logSearchResponse(SearchResponse response, Logger logger) { - List aggs = Collections.emptyList(); + List aggs = Collections.emptyList(); if (response.getAggregations() != null) { aggs = response.getAggregations().asList(); } diff --git 
a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java index b880ec4b06926..afb9b590914dd 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; @@ -221,7 +221,7 @@ protected void @SuppressWarnings("unchecked") void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { - Aggregations aggs = new Aggregations(List.of(newInternalComposite())); + InternalAggregations aggs = InternalAggregations.from(List.of(newInternalComposite())); ActionListener.respondAndRelease( listener, (Response) new SearchResponse( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java index c8a6dd9128d16..6f8fd67d348d6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java @@ -108,7 +108,10 @@ public void addCompletionListener(ActionListener listener) { completionFuture.addListener(listener); } - boolean isFinished() { 
+ /** + * Returns true if an exchange is finished + */ + public boolean isFinished() { return completionFuture.isDone(); } diff --git a/x-pack/plugin/esql/qa/server/build.gradle b/x-pack/plugin/esql/qa/server/build.gradle index ff7ace533fb3a..fe5e08cda32f7 100644 --- a/x-pack/plugin/esql/qa/server/build.gradle +++ b/x-pack/plugin/esql/qa/server/build.gradle @@ -7,5 +7,7 @@ dependencies { // Common utilities from QL api project(xpackModule('ql:test-fixtures')) + // Requirement for some ESQL-specific utilities + implementation project(':x-pack:plugin:esql') api project(xpackModule('esql:qa:testFixtures')) } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 100895feade16..9009441945509 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -46,11 +46,14 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.IntFunction; import static java.util.Collections.emptySet; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode.ASYNC; import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode.SYNC; import static org.hamcrest.Matchers.containsString; @@ -71,6 +74,29 @@ public abstract class RestEsqlTestCase extends ESRestTestCase { private static final List NO_WARNINGS = List.of(); + private static final String MAPPING_ALL_TYPES; + + static { + try 
(InputStream mappingPropertiesStream = RestEsqlTestCase.class.getResourceAsStream("/mapping-all-types.json")) { + String properties = new String(mappingPropertiesStream.readAllBytes(), StandardCharsets.UTF_8); + MAPPING_ALL_TYPES = "{\"mappings\": " + properties + "}"; + } catch (IOException ex) { + throw new RuntimeException(ex); + } + } + + private static final String DOCUMENT_TEMPLATE = """ + {"index":{"_id":"{}"}} + {"boolean": {}, "byte": {}, "date": {}, "double": {}, "float": {}, "half_float": {}, "scaled_float": {}, "integer": {},""" + """ + "ip": {}, "keyword": {}, "long": {}, "unsigned_long": {}, "short": {}, "text": {},""" + """ + "version": {}, "wildcard": {}} + """; + + // larger than any (unsigned) long + private static final String HUMONGOUS_DOUBLE = "1E300"; + private static final String INFINITY = "1.0/0.0"; + private static final String NAN = "0.0/0.0"; + public static boolean shouldLog() { return false; } @@ -295,6 +321,81 @@ public void testCSVNoHeaderMode() throws IOException { assertEquals("keyword0,0\r\n", actual); } + public void testOutOfRangeComparisons() throws IOException { + final int NUM_SINGLE_VALUE_ROWS = 100; + bulkLoadTestData(NUM_SINGLE_VALUE_ROWS); + bulkLoadTestData(10, NUM_SINGLE_VALUE_ROWS, false, RestEsqlTestCase::createDocumentWithMVs); + bulkLoadTestData(5, NUM_SINGLE_VALUE_ROWS + 10, false, RestEsqlTestCase::createDocumentWithNulls); + + List dataTypes = List.of( + "alias_integer", + "byte", + "short", + "integer", + "long", + // TODO: https://github.com/elastic/elasticsearch/issues/102935 + // "unsigned_long", + // TODO: https://github.com/elastic/elasticsearch/issues/100130 + // "half_float", + // "float", + "double", + "scaled_float" + ); + + String lessOrLessEqual = randomFrom(" < ", " <= "); + String largerOrLargerEqual = randomFrom(" > ", " >= "); + String inEqualPlusMinus = randomFrom(" != ", " != -"); + String equalPlusMinus = randomFrom(" == ", " == -"); + // TODO: once we do not support infinity and NaN anymore, 
remove INFINITY/NAN cases. + // https://github.com/elastic/elasticsearch/issues/98698#issuecomment-1847423390 + String humongousPositiveLiteral = randomFrom(HUMONGOUS_DOUBLE, INFINITY); + String nanOrNull = randomFrom(NAN, "to_double(null)"); + + List trueForSingleValuesPredicates = List.of( + lessOrLessEqual + humongousPositiveLiteral, + largerOrLargerEqual + " -" + humongousPositiveLiteral, + inEqualPlusMinus + humongousPositiveLiteral, + inEqualPlusMinus + NAN + ); + List alwaysFalsePredicates = List.of( + lessOrLessEqual + " -" + humongousPositiveLiteral, + largerOrLargerEqual + humongousPositiveLiteral, + equalPlusMinus + humongousPositiveLiteral, + lessOrLessEqual + nanOrNull, + largerOrLargerEqual + nanOrNull, + equalPlusMinus + nanOrNull, + inEqualPlusMinus + "to_double(null)" + ); + + for (String fieldWithType : dataTypes) { + for (String truePredicate : trueForSingleValuesPredicates) { + String comparison = fieldWithType + truePredicate; + var query = builder().query(format(null, "from {} | where {}", testIndexName(), comparison)); + List expectedWarnings = List.of( + "Line 1:29: evaluation of [" + comparison + "] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:29: java.lang.IllegalArgumentException: single-value function encountered multi-value" + ); + var result = runEsql(query, expectedWarnings, mode); + + var values = as(result.get("values"), ArrayList.class); + assertThat( + format(null, "Comparison [{}] should return all rows with single values.", comparison), + values.size(), + is(NUM_SINGLE_VALUE_ROWS) + ); + } + + for (String falsePredicate : alwaysFalsePredicates) { + String comparison = fieldWithType + falsePredicate; + var query = builder().query(format(null, "from {} | where {}", testIndexName(), comparison)); + var result = runEsql(query); + + var values = as(result.get("values"), ArrayList.class); + assertThat(format(null, "Comparison [{}] should return no rows.", comparison), values.size(), is(0)); + } + } + } + public void testWarningHeadersOnFailedConversions() throws IOException { int count = randomFrom(10, 40, 60); bulkLoadTestData(count); @@ -720,37 +821,90 @@ private static Set mutedWarnings() { } private static void bulkLoadTestData(int count) throws IOException { - Request request = new Request("PUT", "/" + testIndexName()); - request.setJsonEntity(""" - { - "mappings": { - "properties": { - "keyword": { - "type": "keyword" - }, - "integer": { - "type": "integer" - } - } - } - }"""); - assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + bulkLoadTestData(count, 0, true, RestEsqlTestCase::createDocument); + } + + private static void bulkLoadTestData(int count, int firstIndex, boolean createIndex, IntFunction createDocument) + throws IOException { + Request request; + if (createIndex) { + request = new Request("PUT", "/" + testIndexName()); + request.setJsonEntity(MAPPING_ALL_TYPES); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + } if (count > 0) { request = new Request("POST", "/" + testIndexName() + "/_bulk"); request.addParameter("refresh", "true"); + StringBuilder bulk = new 
StringBuilder(); for (int i = 0; i < count; i++) { - bulk.append(org.elasticsearch.core.Strings.format(""" - {"index":{"_id":"%s"}} - {"keyword":"keyword%s", "integer":%s} - """, i, i, i)); + bulk.append(createDocument.apply(i + firstIndex)); } request.setJsonEntity(bulk.toString()); assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); } } + private static String createDocument(int i) { + return format( + null, + DOCUMENT_TEMPLATE, + i, + ((i & 1) == 0), + (i % 256), + i, + (i + 0.1), + (i + 0.1), + (i + 0.1), + (i + 0.1), + i, + "\"127.0.0." + (i % 256) + "\"", + "\"keyword" + i + "\"", + i, + i, + (i % Short.MAX_VALUE), + "\"text" + i + "\"", + "\"1.2." + i + "\"", + "\"wildcard" + i + "\"" + ); + } + + private static String createDocumentWithMVs(int i) { + return format( + null, + DOCUMENT_TEMPLATE, + i, + repeatValueAsMV((i & 1) == 0), + repeatValueAsMV(i % 256), + repeatValueAsMV(i), + repeatValueAsMV(i + 0.1), + repeatValueAsMV(i + 0.1), + repeatValueAsMV(i + 0.1), + repeatValueAsMV(i + 0.1), + repeatValueAsMV(i), + repeatValueAsMV("\"127.0.0." + (i % 256) + "\""), + repeatValueAsMV("\"keyword" + i + "\""), + repeatValueAsMV(i), + repeatValueAsMV(i), + repeatValueAsMV(i % Short.MAX_VALUE), + repeatValueAsMV("\"text" + i + "\""), + repeatValueAsMV("\"1.2." 
+ i + "\""), + repeatValueAsMV("\"wildcard" + i + "\"") + ); + } + + private static String createDocumentWithNulls(int i) { + return format(null, """ + {"index":{"_id":"{}"}} + {} + """, i); + } + + private static String repeatValueAsMV(Object value) { + return "[" + value + ", " + value + "]"; + } + private static RequestObjectBuilder builder() throws IOException { return new RequestObjectBuilder(); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-all-types.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-all-types.json new file mode 100644 index 0000000000000..ee1ef56a63dfb --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-all-types.json @@ -0,0 +1,61 @@ +{ + "properties" : { + "alias_integer": { + "type": "alias", + "path": "integer" + }, + "boolean": { + "type": "boolean" + }, + "byte" : { + "type" : "byte" + }, + "constant_keyword-foo": { + "type": "constant_keyword", + "value": "foo" + }, + "date": { + "type": "date" + }, + "double": { + "type": "double" + }, + "float": { + "type": "float" + }, + "half_float": { + "type": "half_float" + }, + "scaled_float": { + "type": "scaled_float", + "scaling_factor": 100 + }, + "integer" : { + "type" : "integer" + }, + "ip": { + "type": "ip" + }, + "keyword" : { + "type" : "keyword" + }, + "long": { + "type": "long" + }, + "unsigned_long": { + "type": "unsigned_long" + }, + "short": { + "type": "short" + }, + "text" : { + "type" : "text" + }, + "version": { + "type": "version" + }, + "wildcard": { + "type": "wildcard" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index e1c1b276a90eb..16a4ebf8fb03e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -16,7 +16,7 @@ asin |"double asin(n:double|integer|long|unsigned_long)"|n atan 
|"double atan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Inverse tangent trigonometric function." | false | false | false atan2 |"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" |[y, x] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false -avg |"double avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "The average of a numeric field." | false | false | true +avg |"double avg(field:double|integer|long)" |field |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." 
| [false, false] | true | false ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false @@ -25,7 +25,7 @@ concat |"keyword concat(first:keyword|text, rest...:keyword|te cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true -count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." 
| [false, true] | false | true +count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false date_extract |long date_extract(date_part:keyword, field:date) |[date_part, field] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false date_format |keyword date_format(?format:keyword, date:date) |[format, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false @@ -40,10 +40,10 @@ left |"keyword left(str:keyword|text, length:integer)" length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false -max |"double|integer|long|unsigned_long max(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The maximum value of a numeric field." | false | false | true -median |"double|integer|long|unsigned_long median(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The value that is greater than half of all values and less than half of all values." | false | false | true -median_absolute_deviation|"double|integer|long|unsigned_long median_absolute_deviation(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The median absolute deviation, a measure of variability." | false | false | true -min |"double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "The minimum value of a numeric field." | false | false | true +max |"double|integer|long max(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." 
| false | false | true +median |"double|integer|long median(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." | false | false | true +median_absolute_deviation|"double|integer|long median_absolute_deviation(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true +min |"double|integer|long min(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false mv_count |"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." 
| false | false | false @@ -55,7 +55,7 @@ mv_median |"double|integer|long|unsigned_long mv_median(v:double| mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false mv_sum |"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false now |date now() | null |null | null |date | "Returns current date and time." | null | false | false -percentile |"double|integer|long|unsigned_long percentile(field:double|integer|long|unsigned_long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long|unsigned_long, double|integer|long"] |["", ""] |"double|integer|long|unsigned_long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true +percentile |"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." 
| null | false | false pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false replace |"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" | [str, regex, newStr] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false @@ -69,7 +69,7 @@ sqrt |"double sqrt(n:double|integer|long|unsigned_long)" st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false substring |"keyword substring(str:keyword|text, start:integer, ?length:integer)" |[str, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false -sum |"long sum(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |long | "The sum of a numeric field." | false | false | true +sum |"long sum(field:double|integer|long)" |field |"double|integer|long" | "" |long | "The sum of a numeric field." 
| false | false | true tan |"double tan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false tanh |"double tanh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false @@ -112,7 +112,7 @@ synopsis:keyword "double atan(n:double|integer|long|unsigned_long)" "double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" "double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" -"double avg(field:double|integer|long|unsigned_long)" +"double avg(field:double|integer|long)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" boolean cidr_match(ip:ip, blockX...:keyword) @@ -121,7 +121,7 @@ boolean cidr_match(ip:ip, blockX...:keyword) "double cos(n:double|integer|long|unsigned_long)" "double cosh(n:double|integer|long|unsigned_long)" "long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version, ?precision:integer)" +"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" 
long date_extract(date_part:keyword, field:date) keyword date_format(?format:keyword, date:date) @@ -136,10 +136,10 @@ double e() "integer length(str:keyword|text)" "double log10(n:double|integer|long|unsigned_long)" "keyword|text ltrim(str:keyword|text)" -"double|integer|long|unsigned_long max(field:double|integer|long|unsigned_long)" -"double|integer|long|unsigned_long median(field:double|integer|long|unsigned_long)" -"double|integer|long|unsigned_long median_absolute_deviation(field:double|integer|long|unsigned_long)" -"double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)" +"double|integer|long max(field:double|integer|long)" +"double|integer|long median(field:double|integer|long)" +"double|integer|long median_absolute_deviation(field:double|integer|long)" +"double|integer|long min(field:double|integer|long)" "double mv_avg(field:double|integer|long|unsigned_long)" "keyword mv_concat(v:text|keyword, delim:text|keyword)" "integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" @@ -151,7 +151,7 @@ double e() "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" date now() -"double|integer|long|unsigned_long percentile(field:double|integer|long|unsigned_long, percentile:double|integer|long)" +"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" double pi() "double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" "keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" @@ -165,7 +165,7 @@ double pi() "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" "boolean starts_with(str:keyword|text, prefix:keyword|text)" "keyword substring(str:keyword|text, 
start:integer, ?length:integer)" -"long sum(field:double|integer|long|unsigned_long)" +"long sum(field:double|integer|long)" "double tan(n:double|integer|long|unsigned_long)" "double tanh(n:double|integer|long|unsigned_long)" double tau() diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 0dd2f4f937421..65b01aae461e5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -831,8 +831,10 @@ FROM employees // end::statsCalcMultipleValues[] ; +// tag::statsCalcMultipleValues-result[] avg_lang:double | max_lang:integer 3.1222222222222222|5 +// end::statsCalcMultipleValues-result[] ; docsStatsGroupByMultipleValues @@ -983,3 +985,130 @@ ROW a = 1, c = null COUNT(c):long | a:integer 0 | 1 ; + + +countVersion#[skip:-8.12.99,reason:bug fixed in 8.13+] +from apps | stats c = count(version), cd = count_distinct(version); + +c:long | cd:long +12 | 9 +; + + +docsStatsAvgNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsStatsAvgNestedExpression[] +FROM employees +| STATS avg_salary_change = AVG(MV_AVG(salary_change)) +// end::docsStatsAvgNestedExpression[] +; + +// tag::docsStatsAvgNestedExpression-result[] +avg_salary_change:double +1.3904535864978902 +// end::docsStatsAvgNestedExpression-result[] +; + +docsStatsByExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsStatsByExpression[] +FROM employees +| STATS my_count = COUNT() BY LEFT(last_name, 1) +| SORT `LEFT(last_name, 1)` +// end::docsStatsByExpression[] +; + +// tag::docsStatsByExpression-result[] +my_count:long |LEFT(last_name, 1):keyword +2 |A +11 |B +5 |C +5 |D +2 |E +4 |F +4 |G +6 |H +2 |J +3 |K +5 |L +12 |M +4 |N +1 |O +7 |P +5 |R +13 |S +4 |T +2 |W +3 |Z +// end::docsStatsByExpression-result[] +; + +docsStatsMaxNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] 
+// tag::docsStatsMaxNestedExpression[] +FROM employees +| STATS max_avg_salary_change = MAX(MV_AVG(salary_change)) +// end::docsStatsMaxNestedExpression[] +; + +// tag::docsStatsMaxNestedExpression-result[] +max_avg_salary_change:double +13.75 +// end::docsStatsMaxNestedExpression-result[] +; + +docsStatsMinNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsStatsMinNestedExpression[] +FROM employees +| STATS min_avg_salary_change = MIN(MV_AVG(salary_change)) +// end::docsStatsMinNestedExpression[] +; + +// tag::docsStatsMinNestedExpression-result[] +min_avg_salary_change:double +-8.46 +// end::docsStatsMinNestedExpression-result[] +; + +docsStatsSumNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsStatsSumNestedExpression[] +FROM employees +| STATS total_salary_changes = SUM(MV_MAX(salary_change)) +// end::docsStatsSumNestedExpression[] +; + +// tag::docsStatsSumNestedExpression-result[] +total_salary_changes:double +446.75 +// end::docsStatsSumNestedExpression-result[] +; + +docsCountWithExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsCountWithExpression[] +ROW words="foo;bar;baz;qux;quux;foo" +| STATS word_count = COUNT(SPLIT(words, ";")) +// end::docsCountWithExpression[] +; + +// tag::docsCountWithExpression-result[] +word_count:long +6 +// end::docsCountWithExpression-result[] +; + +countMultiValuesRow +ROW keyword_field = ["foo", "bar"], int_field = [1, 2, 3] | STATS ck = COUNT(keyword_field), ci = COUNT(int_field), c = COUNT(*); + +ck:l | ci:l | c:l +2 | 3 | 1 +; + +countSource +FROM employees | +STATS ck = COUNT(job_positions), + cb = COUNT(is_rehired), + cd = COUNT(salary_change), + ci = COUNT(salary_change.int), + c = COUNT(*), + csv = COUNT(emp_no); + +ck:l | cb:l | cd:l | ci:l | c:l | csv:l +221 | 204 | 183 | 183 | 100 | 100 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec index 8f926fd8f6ed7..b4f6a701ec272 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_count_distinct.csv-spec @@ -153,6 +153,18 @@ m:long | languages:i 10 | null ; +docsCountDistinctWithExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsCountDistinctWithExpression[] +ROW words="foo;bar;baz;qux;quux;foo" +| STATS distinct_word_count = COUNT_DISTINCT(SPLIT(words, ";")) +// end::docsCountDistinctWithExpression[] +; + +// tag::docsCountDistinctWithExpression-result[] +distinct_word_count:long +5 +// end::docsCountDistinctWithExpression-result[] +; countDistinctWithGroupPrecisionAndNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] from employees | stats m = count_distinct(height + 5, 9876) by languages | sort languages; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec index 091a625c7e10d..8ac93dc5455bd 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec @@ -156,3 +156,42 @@ from employees | stats p50 = percentile(salary_change, -(50-1)+99); p50:double 0.75 ; + +docsStatsMedianNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsStatsMedianNestedExpression[] +FROM employees +| STATS median_max_salary_change = MEDIAN(MV_MAX(salary_change)) +// end::docsStatsMedianNestedExpression[] +; + +// tag::docsStatsMedianNestedExpression-result[] +median_max_salary_change:double +7.69 +// end::docsStatsMedianNestedExpression-result[] +; + +docsStatsMADNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsStatsMADNestedExpression[] +FROM employees +| STATS m_a_d_max_salary_change = 
MEDIAN_ABSOLUTE_DEVIATION(MV_MAX(salary_change)) +// end::docsStatsMADNestedExpression[] +; + +// tag::docsStatsMADNestedExpression-result[] +m_a_d_max_salary_change:double +5.69 +// end::docsStatsMADNestedExpression-result[] +; + +docsStatsPercentileNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] +// tag::docsStatsPercentileNestedExpression[] +FROM employees +| STATS p80_max_salary_change = PERCENTILE(MV_MAX(salary_change), 80) +// end::docsStatsPercentileNestedExpression[] +; + +// tag::docsStatsPercentileNestedExpression-result[] +p80_max_salary_change:double +12.132 +// end::docsStatsPercentileNestedExpression-result[] +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 0590caf2019b4..5ba9c622d85da 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -185,6 +185,9 @@ protected static QueryPragmas randomPragmas() { }; settings.put("page_size", pageSize); } + if (randomBoolean()) { + settings.put("max_concurrent_shards_per_node", randomIntBetween(1, 10)); + } } return new QueryPragmas(settings.build()); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java index a39439d33bfba..fb598cb855013 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java @@ -13,11 +13,20 @@ import org.elasticsearch.action.support.WriteRequest; import 
org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.MockSearchService; +import org.elasticsearch.search.SearchService; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.hamcrest.Matchers; +import org.junit.Before; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; /** * Make sures that we can run many concurrent requests with large number of shards with any data_partitioning. @@ -25,7 +34,15 @@ @LuceneTestCase.SuppressFileSystems(value = "HandleLimitFS") public class ManyShardsIT extends AbstractEsqlIntegTestCase { - public void testConcurrentQueries() throws Exception { + @Override + protected Collection> getMockPlugins() { + var plugins = new ArrayList<>(super.getMockPlugins()); + plugins.add(MockSearchService.TestPlugin.class); + return plugins; + } + + @Before + public void setupIndices() { int numIndices = between(10, 20); for (int i = 0; i < numIndices; i++) { String index = "test-" + i; @@ -49,6 +66,9 @@ public void testConcurrentQueries() throws Exception { } bulk.get(); } + } + + public void testConcurrentQueries() throws Exception { int numQueries = between(10, 20); Thread[] threads = new Thread[numQueries]; CountDownLatch latch = new CountDownLatch(1); @@ -76,4 +96,57 @@ public void testConcurrentQueries() throws Exception { thread.join(); } } + + static class SearchContextCounter { + private final int maxAllowed; + private final AtomicInteger current = new AtomicInteger(); + + SearchContextCounter(int maxAllowed) { + this.maxAllowed = maxAllowed; + } + + void onNewContext() { + int total = current.incrementAndGet(); + assertThat("opening more shards than the limit", total, Matchers.lessThanOrEqualTo(maxAllowed)); + } + + void 
onContextReleased() { + int total = current.decrementAndGet(); + assertThat(total, Matchers.greaterThanOrEqualTo(0)); + } + } + + public void testLimitConcurrentShards() { + Iterable searchServices = internalCluster().getInstances(SearchService.class); + try { + var queries = List.of( + "from test-* | stats count(user) by tags", + "from test-* | stats count(user) by tags | LIMIT 0", + "from test-* | stats count(user) by tags | LIMIT 1", + "from test-* | stats count(user) by tags | LIMIT 1000", + "from test-* | LIMIT 0", + "from test-* | LIMIT 1", + "from test-* | LIMIT 1000", + "from test-* | SORT tags | LIMIT 0", + "from test-* | SORT tags | LIMIT 1", + "from test-* | SORT tags | LIMIT 1000" + ); + for (String q : queries) { + QueryPragmas pragmas = randomPragmas(); + for (SearchService searchService : searchServices) { + SearchContextCounter counter = new SearchContextCounter(pragmas.maxConcurrentShardsPerNode()); + var mockSearchService = (MockSearchService) searchService; + mockSearchService.setOnPutContext(r -> counter.onNewContext()); + mockSearchService.setOnRemoveContext(r -> counter.onContextReleased()); + } + run(q, pragmas).close(); + } + } finally { + for (SearchService searchService : searchServices) { + var mockSearchService = (MockSearchService) searchService; + mockSearchService.setOnPutContext(r -> {}); + mockSearchService.setOnRemoveContext(r -> {}); + } + } + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java index fb6d23695f837..0f05add15da53 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionListener; +import 
org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.TransportService; @@ -38,7 +39,11 @@ public void testCollectWarnings() throws Exception { client().admin() .indices() .prepareCreate("index-1") - .setSettings(Settings.builder().put("index.routing.allocation.require._name", node1)) + .setSettings( + Settings.builder() + .put("index.routing.allocation.require._name", node1) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)) + ) .setMapping("host", "type=keyword") ); for (int i = 0; i < numDocs1; i++) { @@ -49,7 +54,11 @@ public void testCollectWarnings() throws Exception { client().admin() .indices() .prepareCreate("index-2") - .setSettings(Settings.builder().put("index.routing.allocation.require._name", node2)) + .setSettings( + Settings.builder() + .put("index.routing.allocation.require._name", node2) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)) + ) .setMapping("host", "type=keyword") ); for (int i = 0; i < numDocs2; i++) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 0ba834d1d8954..784d97f820428 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -21,18 +21,24 @@ import java.util.List; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; public class Avg extends AggregateFunction implements SurrogateExpression { @FunctionInfo(returnType = "double", description = "The 
average of a numeric field.", isAggregation = true) - public Avg(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public Avg(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { super(source, field); } @Override protected Expression.TypeResolution resolveType() { - return isNumeric(field(), sourceText(), DEFAULT); + return isType( + field(), + dt -> dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "numeric except unsigned_long" + ); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 62dd3bc6b6254..4e52eecc5e80a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -32,6 +32,7 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isFoldable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; public class CountDistinct extends AggregateFunction implements OptionalArgument, ToAggregator { private static final int DEFAULT_PRECISION = 3000; @@ -42,19 +43,7 @@ public CountDistinct( Source source, @Param( name = "field", - type = { - "boolean", - "cartesian_point", - "date", - "double", - "geo_point", - "integer", - "ip", - "keyword", - "long", - "text", - "unsigned_long", - "version" }, + type = { "boolean", "cartesian_point", "date", "double", "geo_point", "integer", "ip", "keyword", "long", "text", "version" }, description = "Column or 
literal for which to count the number of distinct values." ) Expression field, @Param(optional = true, name = "precision", type = { "integer" }) Expression precision @@ -85,10 +74,21 @@ protected TypeResolution resolveType() { } TypeResolution resolution = EsqlTypeResolutions.isExact(field(), sourceText(), DEFAULT); - if (resolution.unresolved() || precision == null) { + if (resolution.unresolved()) { return resolution; } + boolean resolved = resolution.resolved(); + resolution = isType( + field(), + dt -> resolved && dt != DataTypes.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "any exact type except unsigned_long" + ); + if (resolution.unresolved() || precision == null) { + return resolution; + } return isInteger(precision, sourceText(), SECOND).and(isFoldable(precision, sourceText(), SECOND)); } @@ -109,7 +109,7 @@ public AggregatorFunctionSupplier supplier(List inputChannels) { if (type == DataTypes.DOUBLE) { return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, precision); } - if (type == DataTypes.KEYWORD || type == DataTypes.IP || type == DataTypes.TEXT) { + if (type == DataTypes.KEYWORD || type == DataTypes.IP || type == DataTypes.VERSION || type == DataTypes.TEXT) { return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, precision); } throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index cdcfe20c968a8..d8ec5300c061f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -22,12 +22,8 @@ public class Max extends NumericAggregate { - @FunctionInfo( - returnType = { "double", "integer", "long", "unsigned_long" }, - description = "The 
maximum value of a numeric field.", - isAggregation = true - ) - public Max(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The maximum value of a numeric field.", isAggregation = true) + public Max(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index 7f5bce981db51..a6f4e30a62459 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -22,22 +22,28 @@ import java.util.List; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; public class Median extends AggregateFunction implements SurrogateExpression { // TODO: Add the compression parameter @FunctionInfo( - returnType = { "double", "integer", "long", "unsigned_long" }, + returnType = { "double", "integer", "long" }, description = "The value that is greater than half of all values and less than half of all values.", isAggregation = true ) - public Median(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public Median(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { super(source, field); } @Override protected Expression.TypeResolution resolveType() { - return isNumeric(field(), sourceText(), DEFAULT); + 
return isType( + field(), + dt -> dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "numeric except unsigned_long" + ); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index ddf0fd15fe2d0..ecf1a47ee9eb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -23,14 +23,11 @@ public class MedianAbsoluteDeviation extends NumericAggregate { // TODO: Add parameter @FunctionInfo( - returnType = { "double", "integer", "long", "unsigned_long" }, + returnType = { "double", "integer", "long" }, description = "The median absolute deviation, a measure of variability.", isAggregation = true ) - public MedianAbsoluteDeviation( - Source source, - @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field - ) { + public MedianAbsoluteDeviation(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 22da614675f9e..8fdce6d959b98 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -22,12 +22,8 @@ public class Min extends NumericAggregate { - @FunctionInfo( - returnType = { "double", "integer", "long", "unsigned_long" }, - description = "The 
minimum value of a numeric field.", - isAggregation = true - ) - public Min(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The minimum value of a numeric field.", isAggregation = true) + public Min(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java index 297aeb7fc0e29..8e1e38441e9a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java @@ -19,7 +19,7 @@ import java.util.List; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; public abstract class NumericAggregate extends AggregateFunction implements ToAggregator { @@ -36,14 +36,20 @@ protected TypeResolution resolveType() { if (supportsDates()) { return TypeResolutions.isType( this, - e -> e.isNumeric() || e == DataTypes.DATETIME, + e -> e == DataTypes.DATETIME || e.isNumeric() && e != DataTypes.UNSIGNED_LONG, sourceText(), DEFAULT, - "numeric", - "datetime" + "datetime", + "numeric except unsigned_long" ); } - return isNumeric(field(), sourceText(), DEFAULT); + return isType( + field(), + dt -> dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "numeric except unsigned_long" + ); } protected boolean supportsDates() { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index c34783f7352c3..96385d534edcd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.List; @@ -23,18 +24,19 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isFoldable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; public class Percentile extends NumericAggregate { private final Expression percentile; @FunctionInfo( - returnType = { "double", "integer", "long", "unsigned_long" }, + returnType = { "double", "integer", "long" }, description = "The value at which a certain percentage of observed values occur.", isAggregation = true ) public Percentile( Source source, - @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field, + @Param(name = "field", type = { "double", "integer", "long" }) Expression field, @Param(name = "percentile", type = { "double", "integer", "long" }) Expression percentile ) { super(source, field, List.of(percentile)); @@ -61,7 +63,13 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - TypeResolution resolution = isNumeric(field(), sourceText(), FIRST); + TypeResolution resolution = isType( + field(), + dt -> 
dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + sourceText(), + FIRST, + "numeric except unsigned_long" + ); if (resolution.unresolved()) { return resolution; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 0acf18981a83d..d09762947a597 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -29,7 +29,7 @@ public class Sum extends NumericAggregate { @FunctionInfo(returnType = "long", description = "The sum of a numeric field.", isAggregation = true) - public Sum(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public Sum(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index c375ef24da829..f5cee225b1b13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -254,15 +254,18 @@ private static String dataTypeToString(DataType type, Class aggClass) { return "Long"; } else if (type.equals(DataTypes.DOUBLE)) { return "Double"; - } else if (type.equals(DataTypes.KEYWORD) || type.equals(DataTypes.IP) || type.equals(DataTypes.TEXT)) { - return "BytesRef"; - } else if (type.equals(GEO_POINT)) { - return "GeoPoint"; - } else if (type.equals(CARTESIAN_POINT)) { - return "CartesianPoint"; - } else { - throw new EsqlIllegalArgumentException("illegal agg type: " 
+ type.typeName()); - } + } else if (type.equals(DataTypes.KEYWORD) + || type.equals(DataTypes.IP) + || type.equals(DataTypes.VERSION) + || type.equals(DataTypes.TEXT)) { + return "BytesRef"; + } else if (type.equals(GEO_POINT)) { + return "GeoPoint"; + } else if (type.equals(CARTESIAN_POINT)) { + return "CartesianPoint"; + } else { + throw new EsqlIllegalArgumentException("illegal agg type: " + type.typeName()); + } } private static Expression unwrapAlias(Expression expression) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java index 98b1037c704f6..4dd61def0b2c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java @@ -22,23 +22,39 @@ import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNull; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NullEquals; import org.elasticsearch.xpack.ql.planner.ExpressionTranslator; import 
org.elasticsearch.xpack.ql.planner.ExpressionTranslators; import org.elasticsearch.xpack.ql.planner.QlTranslatorHandler; import org.elasticsearch.xpack.ql.planner.TranslatorHandler; +import org.elasticsearch.xpack.ql.querydsl.query.MatchAll; import org.elasticsearch.xpack.ql.querydsl.query.Query; import org.elasticsearch.xpack.ql.querydsl.query.TermQuery; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Check; +import java.math.BigDecimal; +import java.math.BigInteger; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; + public final class EsqlTranslatorHandler extends QlTranslatorHandler { public static final List> QUERY_TRANSLATORS = List.of( new EqualsIgnoreCaseTranslator(), + new TrivialBinaryComparisons(), new ExpressionTranslators.BinaryComparisons(), new ExpressionTranslators.Ranges(), new ExpressionTranslators.BinaryLogic(), @@ -124,4 +140,109 @@ static Query translate(InsensitiveEquals bc) { return new TermQuery(source, name, value.utf8ToString(), true); } } + + public static class TrivialBinaryComparisons extends ExpressionTranslator { + @Override + protected Query asQuery(BinaryComparison bc, TranslatorHandler handler) { + ExpressionTranslators.BinaryComparisons.checkBinaryComparison(bc); + Query translated = translate(bc); + return translated == null ? 
null : handler.wrapFunctionQuery(bc, bc.left(), () -> translated); + } + + private static Query translate(BinaryComparison bc) { + if ((bc.left() instanceof FieldAttribute) == false + || bc.left().dataType().isNumeric() == false + || bc.right().foldable() == false) { + return null; + } + Source source = bc.source(); + Object value = ExpressionTranslators.valueOf(bc.right()); + + // Comparisons with multi-values always return null in ESQL. + if (value instanceof List) { + return new MatchAll(source).negate(source); + } + + DataType valueType = bc.right().dataType(); + DataType attributeDataType = bc.left().dataType(); + if (valueType == UNSIGNED_LONG && value instanceof Long ul) { + value = unsignedLongAsNumber(ul); + } + Number num = (Number) value; + if (isInRange(attributeDataType, valueType, num)) { + return null; + } + + if (Double.isNaN(((Number) value).doubleValue())) { + return new MatchAll(source).negate(source); + } + + boolean matchAllOrNone; + if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { + matchAllOrNone = (num.doubleValue() > 0) == false; + } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { + matchAllOrNone = (num.doubleValue() > 0); + } else if (bc instanceof Equals || bc instanceof NullEquals) { + matchAllOrNone = false; + } else if (bc instanceof NotEquals) { + matchAllOrNone = true; + } else { + throw new QlIllegalArgumentException("Unknown binary comparison [{}]", bc); + } + + return matchAllOrNone ? 
new MatchAll(source) : new MatchAll(source).negate(source); + } + + private static final BigDecimal HALF_FLOAT_MAX = BigDecimal.valueOf(65504); + private static final BigDecimal UNSIGNED_LONG_MAX = BigDecimal.valueOf(2).pow(64).subtract(BigDecimal.ONE); + + private static boolean isInRange(DataType numericFieldDataType, DataType valueDataType, Number value) { + double doubleValue = value.doubleValue(); + if (Double.isNaN(doubleValue) || Double.isInfinite(doubleValue)) { + return false; + } + + BigDecimal decimalValue; + if (value instanceof BigInteger bigIntValue) { + // Unsigned longs may be represented as BigInteger. + decimalValue = new BigDecimal(bigIntValue); + } else { + decimalValue = valueDataType.isRational() ? BigDecimal.valueOf(doubleValue) : BigDecimal.valueOf(value.longValue()); + } + + // Determine min/max for dataType. Use BigDecimals as doubles will have rounding errors for long/ulong. + BigDecimal minValue; + BigDecimal maxValue; + if (numericFieldDataType == DataTypes.BYTE) { + minValue = BigDecimal.valueOf(Byte.MIN_VALUE); + maxValue = BigDecimal.valueOf(Byte.MAX_VALUE); + } else if (numericFieldDataType == DataTypes.SHORT) { + minValue = BigDecimal.valueOf(Short.MIN_VALUE); + maxValue = BigDecimal.valueOf(Short.MAX_VALUE); + } else if (numericFieldDataType == DataTypes.INTEGER) { + minValue = BigDecimal.valueOf(Integer.MIN_VALUE); + maxValue = BigDecimal.valueOf(Integer.MAX_VALUE); + } else if (numericFieldDataType == DataTypes.LONG) { + minValue = BigDecimal.valueOf(Long.MIN_VALUE); + maxValue = BigDecimal.valueOf(Long.MAX_VALUE); + } else if (numericFieldDataType == DataTypes.UNSIGNED_LONG) { + minValue = BigDecimal.ZERO; + maxValue = UNSIGNED_LONG_MAX; + } else if (numericFieldDataType == DataTypes.HALF_FLOAT) { + minValue = HALF_FLOAT_MAX.negate(); + maxValue = HALF_FLOAT_MAX; + } else if (numericFieldDataType == DataTypes.FLOAT) { + minValue = BigDecimal.valueOf(-Float.MAX_VALUE); + maxValue = BigDecimal.valueOf(Float.MAX_VALUE); + } else 
if (numericFieldDataType == DataTypes.DOUBLE || numericFieldDataType == DataTypes.SCALED_FLOAT) { + // Scaled floats are represented as doubles in ESQL. + minValue = BigDecimal.valueOf(-Double.MAX_VALUE); + maxValue = BigDecimal.valueOf(Double.MAX_VALUE); + } else { + throw new QlIllegalArgumentException("Data type [{}] unsupported for numeric range check", numericFieldDataType); + } + + return minValue.compareTo(decimalValue) <= 0 && maxValue.compareTo(decimalValue) >= 0; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index ef9bd6a9103af..1e988e392590f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -31,6 +31,7 @@ import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.compute.operator.exchange.ExchangeSink; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; import org.elasticsearch.core.IOUtils; @@ -369,7 +370,7 @@ private ActionListener cancelOnFailure(CancellableTask task, AtomicBoolean } void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener> listener) { - listener = ActionListener.runAfter(listener, () -> Releasables.close(context.searchContexts)); + listener = ActionListener.runBefore(listener, () -> Releasables.close(context.searchContexts)); List contexts = new ArrayList<>(context.searchContexts.size()); for (int i = 0; i < context.searchContexts.size(); i++) { SearchContext searchContext = context.searchContexts.get(i); @@ -457,6 +458,8 @@ private void 
acquireSearchContexts( aliasFilter, clusterAlias ); + // TODO: `searchService.createSearchContext` allows opening search contexts without limits, + // we need to limit the number of active search contexts here or in SearchService SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); searchContexts.add(context); } @@ -576,46 +579,94 @@ void lookupDataNodes( // TODO: Use an internal action here public static final String DATA_ACTION_NAME = EsqlQueryAction.NAME + "/data"; - private class DataNodeRequestHandler implements TransportRequestHandler { - @Override - public void messageReceived(DataNodeRequest request, TransportChannel channel, Task task) { - final var parentTask = (CancellableTask) task; - final var sessionId = request.sessionId(); - final var exchangeSink = exchangeService.getSinkHandler(sessionId); + private class DataNodeRequestExecutor { + private final DataNodeRequest request; + private final CancellableTask parentTask; + private final ExchangeSinkHandler exchangeSink; + private final ActionListener listener; + private final List driverProfiles; + private final int maxConcurrentShards; + private final ExchangeSink blockingSink; // block until we have completed on all shards or the coordinator has enough data + + DataNodeRequestExecutor( + DataNodeRequest request, + CancellableTask parentTask, + ExchangeSinkHandler exchangeSink, + int maxConcurrentShards, + ActionListener listener + ) { + this.request = request; + this.parentTask = parentTask; + this.exchangeSink = exchangeSink; + this.listener = listener; + this.driverProfiles = request.configuration().profile() ? 
Collections.synchronizedList(new ArrayList<>()) : List.of(); + this.maxConcurrentShards = maxConcurrentShards; + this.blockingSink = exchangeSink.createExchangeSink(); + } + + void start() { parentTask.addListener( - () -> exchangeService.finishSinkHandler(sessionId, new TaskCancelledException(parentTask.getReasonCancelled())) + () -> exchangeService.finishSinkHandler(request.sessionId(), new TaskCancelledException(parentTask.getReasonCancelled())) ); - final ActionListener listener = new ChannelActionListener<>(channel); + runBatch(0); + } + + private void runBatch(int startBatchIndex) { final EsqlConfiguration configuration = request.configuration(); - String clusterAlias = request.clusterAlias(); - acquireSearchContexts( - clusterAlias, - request.shardIds(), - configuration, - request.aliasFilters(), - ActionListener.wrap(searchContexts -> { - assert ThreadPool.assertCurrentThreadPool(ESQL_THREAD_POOL_NAME); - var computeContext = new ComputeContext(sessionId, clusterAlias, searchContexts, configuration, null, exchangeSink); - runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { - // don't return until all pages are fetched - exchangeSink.addCompletionListener( - ContextPreservingActionListener.wrapPreservingContext( - ActionListener.releaseAfter( - listener.map(nullValue -> new ComputeResponse(driverProfiles)), - () -> exchangeService.finishSinkHandler(sessionId, null) - ), - transportService.getThreadPool().getThreadContext() - ) - ); - }, e -> { - exchangeService.finishSinkHandler(sessionId, e); - listener.onFailure(e); - })); - }, e -> { - exchangeService.finishSinkHandler(sessionId, e); - listener.onFailure(e); - }) + final String clusterAlias = request.clusterAlias(); + final var sessionId = request.sessionId(); + final int endBatchIndex = Math.min(startBatchIndex + maxConcurrentShards, request.shardIds().size()); + List shardIds = request.shardIds().subList(startBatchIndex, endBatchIndex); + 
acquireSearchContexts(clusterAlias, shardIds, configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { + assert ThreadPool.assertCurrentThreadPool(ESQL_THREAD_POOL_NAME, ESQL_WORKER_THREAD_POOL_NAME); + var computeContext = new ComputeContext(sessionId, clusterAlias, searchContexts, configuration, null, exchangeSink); + runCompute( + parentTask, + computeContext, + request.plan(), + ActionListener.wrap(profiles -> onBatchCompleted(endBatchIndex, profiles), this::onFailure) + ); + }, this::onFailure)); + } + + private void onBatchCompleted(int lastBatchIndex, List batchProfiles) { + if (request.configuration().profile()) { + driverProfiles.addAll(batchProfiles); + } + if (lastBatchIndex < request.shardIds().size() && exchangeSink.isFinished() == false) { + runBatch(lastBatchIndex); + } else { + blockingSink.finish(); + // don't return until all pages are fetched + exchangeSink.addCompletionListener( + ContextPreservingActionListener.wrapPreservingContext( + ActionListener.runBefore( + listener.map(nullValue -> new ComputeResponse(driverProfiles)), + () -> exchangeService.finishSinkHandler(request.sessionId(), null) + ), + transportService.getThreadPool().getThreadContext() + ) + ); + } + } + + private void onFailure(Exception e) { + exchangeService.finishSinkHandler(request.sessionId(), e); + listener.onFailure(e); + } + } + + private class DataNodeRequestHandler implements TransportRequestHandler { + @Override + public void messageReceived(DataNodeRequest request, TransportChannel channel, Task task) { + DataNodeRequestExecutor executor = new DataNodeRequestExecutor( + request, + (CancellableTask) task, + exchangeService.getSinkHandler(request.sessionId()), + request.configuration().pragmas().maxConcurrentShardsPerNode(), + new ChannelActionListener<>(channel) ); + executor.start(); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java index 65a07c98af29a..2ceee9de9001e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java @@ -53,6 +53,8 @@ public final class QueryPragmas implements Writeable { */ public static final Setting STATUS_INTERVAL = Setting.timeSetting("status_interval", Driver.DEFAULT_STATUS_INTERVAL); + public static final Setting MAX_CONCURRENT_SHARDS_PER_NODE = Setting.intSetting("max_concurrent_shards_per_node", 10, 1, 100); + public static final QueryPragmas EMPTY = new QueryPragmas(Settings.EMPTY); private final Settings settings; @@ -114,6 +116,14 @@ public int enrichMaxWorkers() { return ENRICH_MAX_WORKERS.get(settings); } + /** + * The maximum number of shards can be executed concurrently on a single node by this query. This is a safeguard to avoid + * opening and holding many shards (equivalent to many file descriptors) or having too many field infos created by a single query. 
+ */ + public int maxConcurrentShardsPerNode() { + return MAX_CONCURRENT_SHARDS_PER_NODE.get(settings); + } + public boolean isEmpty() { return settings.isEmpty(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java index 1106ecc344db7..d57b42a7a511f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java @@ -323,12 +323,12 @@ private static int countEntries(IndexReader indexReader, String field) { if (fieldInfo.getPointIndexDimensionCount() > 0) { PointValues points = reader.getPointValues(field); if (points != null) { - count += points.getDocCount(); + count += points.size(); } } else if (fieldInfo.getIndexOptions() != IndexOptions.NONE) { Terms terms = reader.terms(field); if (terms != null) { - count += terms.getDocCount(); + count += terms.getSumTotalTermFreq(); } } else { return -1; // no shortcut possible for fields that are not indexed diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index a1d5374773eb4..ee77ff93b7687 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1554,6 +1554,48 @@ public void testUnresolvedMvExpand() { assertThat(e.getMessage(), containsString("Unknown column [bar]")); } + public void testUnsupportedTypesInStats() { + verifyUnsupported( + """ + row x = to_unsigned_long(\"10\") + | stats avg(x), count_distinct(x), max(x), median(x), median_absolute_deviation(x), min(x), percentile(x, 10), sum(x) + """, + "Found 8 problems\n" + + "line 2:12: argument of [avg(x)] must be [numeric 
except unsigned_long], found value [x] type [unsigned_long]\n" + + "line 2:20: argument of [count_distinct(x)] must be [any exact type except unsigned_long], " + + "found value [x] type [unsigned_long]\n" + + "line 2:39: argument of [max(x)] must be [datetime or numeric except unsigned_long], " + + "found value [max(x)] type [unsigned_long]\n" + + "line 2:47: argument of [median(x)] must be [numeric except unsigned_long], found value [x] type [unsigned_long]\n" + + "line 2:58: argument of [median_absolute_deviation(x)] must be [numeric except unsigned_long], " + + "found value [x] type [unsigned_long]\n" + + "line 2:88: argument of [min(x)] must be [datetime or numeric except unsigned_long], " + + "found value [min(x)] type [unsigned_long]\n" + + "line 2:96: first argument of [percentile(x, 10)] must be [numeric except unsigned_long], " + + "found value [x] type [unsigned_long]\n" + + "line 2:115: argument of [sum(x)] must be [numeric except unsigned_long], found value [x] type [unsigned_long]" + ); + + verifyUnsupported( + """ + row x = to_version("1.2") + | stats avg(x), max(x), median(x), median_absolute_deviation(x), min(x), percentile(x, 10), sum(x) + """, + "Found 7 problems\n" + + "line 2:10: argument of [avg(x)] must be [numeric except unsigned_long], found value [x] type [version]\n" + + "line 2:18: argument of [max(x)] must be [datetime or numeric except unsigned_long], " + + "found value [max(x)] type [version]\n" + + "line 2:26: argument of [median(x)] must be [numeric except unsigned_long], found value [x] type [version]\n" + + "line 2:37: argument of [median_absolute_deviation(x)] must be [numeric except unsigned_long], " + + "found value [x] type [version]\n" + + "line 2:67: argument of [min(x)] must be [datetime or numeric except unsigned_long], " + + "found value [min(x)] type [version]\n" + + "line 2:75: first argument of [percentile(x, 10)] must be [numeric except unsigned_long], " + + "found value [x] type [version]\n" + + "line 2:94: argument 
of [sum(x)] must be [numeric except unsigned_long], found value [x] type [version]" + ); + } + private void verifyUnsupported(String query, String errorMessage) { verifyUnsupported(query, errorMessage, "mapping-multi-field-variation.json"); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 4c8e58fceffde..632c6087cf880 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -74,7 +74,7 @@ public void testAggsExpressionsInStatsAggs() { error("from test | stats max(max(salary)) by first_name") ); assertEquals( - "1:25: argument of [avg(first_name)] must be [numeric], found value [first_name] type [keyword]", + "1:25: argument of [avg(first_name)] must be [numeric except unsigned_long], found value [first_name] type [keyword]", error("from test | stats count(avg(first_name)) by first_name") ); assertEquals( @@ -244,7 +244,7 @@ public void testUnsignedLongNegation() { public void testSumOnDate() { assertEquals( - "1:19: argument of [sum(hire_date)] must be [numeric], found value [hire_date] type [datetime]", + "1:19: argument of [sum(hire_date)] must be [numeric except unsigned_long], found value [hire_date] type [datetime]", error("from test | stats sum(hire_date)") ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 9a558daea6de6..7321799efd705 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -44,11 
+44,9 @@ import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; -import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.tree.Source; @@ -67,6 +65,7 @@ import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -86,9 +85,8 @@ public class LocalPhysicalPlanOptimizerTests extends ESTestCase { private Analyzer analyzer; private LogicalPlanOptimizer logicalOptimizer; private PhysicalPlanOptimizer physicalPlanOptimizer; + private EsqlFunctionRegistry functionRegistry; private Mapper mapper; - private Map mapping; - private int allFieldRowSize; private final EsqlConfiguration config; private final SearchStats IS_SV_STATS = new TestSearchStats() { @@ -117,24 +115,9 @@ public LocalPhysicalPlanOptimizerTests(String name, EsqlConfiguration config) { @Before public void init() { parser = new EsqlParser(); - - mapping = loadMapping("mapping-basic.json"); - allFieldRowSize = mapping.values() - .stream() - .mapToInt( - f -> (EstimatesRowSize.estimateSize(EsqlDataTypes.widenSmallNumericTypes(f.getDataType())) + f.getProperties() - .values() - .stream() - // check one more level since the mapping contains TEXT fields with KEYWORD multi-fields - .mapToInt(x 
-> EstimatesRowSize.estimateSize(EsqlDataTypes.widenSmallNumericTypes(x.getDataType()))) - .sum()) - ) - .sum(); - EsIndex test = new EsIndex("test", mapping); - IndexResolution getIndexResult = IndexResolution.valid(test); logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); - FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); + functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); EnrichResolution enrichResolution = new EnrichResolution(); enrichResolution.addResolvedPolicy( @@ -151,10 +134,15 @@ public void init() { ) ) ); - analyzer = new Analyzer( - new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), - new Verifier(new Metrics()) - ); + analyzer = makeAnalyzer("mapping-basic.json", enrichResolution); + } + + private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichResolution) { + var mapping = loadMapping(mappingFileName); + EsIndex test = new EsIndex("test", mapping); + IndexResolution getIndexResult = IndexResolution.valid(test); + + return new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), new Verifier(new Metrics())); } /** @@ -427,6 +415,115 @@ public void testIsNullPushdownFilter() { assertThat(query.query().toString(), is(expected.toString())); } + private record OutOfRangeTestCase(String fieldName, String tooLow, String tooHigh) {}; + + public void testOutOfRangeFilterPushdown() { + var allTypeMappingAnalyzer = makeAnalyzer("mapping-all-types.json", new EnrichResolution()); + + String largerThanInteger = String.valueOf(randomLongBetween(Integer.MAX_VALUE + 1L, Long.MAX_VALUE)); + String smallerThanInteger = String.valueOf(randomLongBetween(Long.MIN_VALUE, Integer.MIN_VALUE - 1L)); + + // These values are already out of bounds for longs due to rounding errors. 
+ double longLowerBoundExclusive = (double) Long.MIN_VALUE; + double longUpperBoundExclusive = (double) Long.MAX_VALUE; + String largerThanLong = String.valueOf(randomDoubleBetween(longUpperBoundExclusive, Double.MAX_VALUE, true)); + String smallerThanLong = String.valueOf(randomDoubleBetween(-Double.MAX_VALUE, longLowerBoundExclusive, true)); + + List cases = List.of( + new OutOfRangeTestCase("byte", smallerThanInteger, largerThanInteger), + new OutOfRangeTestCase("short", smallerThanInteger, largerThanInteger), + new OutOfRangeTestCase("integer", smallerThanInteger, largerThanInteger), + new OutOfRangeTestCase("long", smallerThanLong, largerThanLong), + // TODO: add unsigned_long https://github.com/elastic/elasticsearch/issues/102935 + // TODO: add half_float, float https://github.com/elastic/elasticsearch/issues/100130 + new OutOfRangeTestCase("double", "-1.0/0.0", "1.0/0.0"), + new OutOfRangeTestCase("scaled_float", "-1.0/0.0", "1.0/0.0") + ); + + final String LT = "<"; + final String LTE = "<="; + final String GT = ">"; + final String GTE = ">="; + final String EQ = "=="; + final String NEQ = "!="; + + for (OutOfRangeTestCase testCase : cases) { + List trueForSingleValuesPredicates = List.of( + LT + testCase.tooHigh, + LTE + testCase.tooHigh, + GT + testCase.tooLow, + GTE + testCase.tooLow, + NEQ + testCase.tooHigh, + NEQ + testCase.tooLow, + NEQ + "0.0/0.0" + ); + List alwaysFalsePredicates = List.of( + LT + testCase.tooLow, + LTE + testCase.tooLow, + GT + testCase.tooHigh, + GTE + testCase.tooHigh, + EQ + testCase.tooHigh, + EQ + testCase.tooLow, + LT + "0.0/0.0", + LTE + "0.0/0.0", + GT + "0.0/0.0", + GTE + "0.0/0.0", + EQ + "0.0/0.0" + ); + + for (String truePredicate : trueForSingleValuesPredicates) { + String comparison = testCase.fieldName + truePredicate; + var query = "from test | where " + comparison; + Source expectedSource = new Source(1, 18, comparison); + + EsQueryExec actualQueryExec = doTestOutOfRangeFilterPushdown(query, 
allTypeMappingAnalyzer); + + assertThat(actualQueryExec.query(), is(instanceOf(SingleValueQuery.Builder.class))); + var actualLuceneQuery = (SingleValueQuery.Builder) actualQueryExec.query(); + assertThat(actualLuceneQuery.field(), equalTo(testCase.fieldName)); + assertThat(actualLuceneQuery.source(), equalTo(expectedSource)); + + assertThat(actualLuceneQuery.next(), equalTo(QueryBuilders.matchAllQuery())); + } + + for (String falsePredicate : alwaysFalsePredicates) { + String comparison = testCase.fieldName + falsePredicate; + var query = "from test | where " + comparison; + Source expectedSource = new Source(1, 18, comparison); + + EsQueryExec actualQueryExec = doTestOutOfRangeFilterPushdown(query, allTypeMappingAnalyzer); + + assertThat(actualQueryExec.query(), is(instanceOf(SingleValueQuery.Builder.class))); + var actualLuceneQuery = (SingleValueQuery.Builder) actualQueryExec.query(); + assertThat(actualLuceneQuery.field(), equalTo(testCase.fieldName)); + assertThat(actualLuceneQuery.source(), equalTo(expectedSource)); + + var expectedInnerQuery = QueryBuilders.boolQuery().mustNot(QueryBuilders.matchAllQuery()); + assertThat(actualLuceneQuery.next(), equalTo(expectedInnerQuery)); + } + } + } + + /** + * Expects e.g. 
+ * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[!alias_integer, boolean{f}#190, byte{f}#191, constant_keyword-foo{f}#192, date{f}#193, double{f}#194, ...]] + * \_FieldExtractExec[!alias_integer, boolean{f}#190, byte{f}#191, consta..][] + * \_EsQueryExec[test], query[{"esql_single_value":{"field":"byte","next":{"match_all":{"boost":1.0}},...}}] + */ + private EsQueryExec doTestOutOfRangeFilterPushdown(String query, Analyzer analyzer) { + var plan = plan(query, EsqlTestUtils.TEST_SEARCH_STATS, analyzer); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var luceneQuery = as(fieldExtract.child(), EsQueryExec.class); + + return luceneQuery; + } + /** * Expects * LimitExec[500[INTEGER]] @@ -486,7 +583,11 @@ private PhysicalPlan plan(String query) { } private PhysicalPlan plan(String query, SearchStats stats) { - var physical = optimizedPlan(physicalPlan(query), stats); + return plan(query, stats, analyzer); + } + + private PhysicalPlan plan(String query, SearchStats stats, Analyzer analyzer) { + var physical = optimizedPlan(physicalPlan(query, analyzer), stats); return physical; } @@ -509,7 +610,7 @@ private PhysicalPlan optimizedPlan(PhysicalPlan plan, SearchStats searchStats) { return l; } - private PhysicalPlan physicalPlan(String query) { + private PhysicalPlan physicalPlan(String query, Analyzer analyzer) { var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java index 
eee6f68c20ff7..5ffb4b5df08cc 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java @@ -16,6 +16,7 @@ import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -123,11 +124,11 @@ public void infer( Model model, List input, Map taskSettings, + InputType inputType, ActionListener listener ) { switch (model.getConfigurations().getTaskType()) { - case ANY -> listener.onResponse(makeResults(input)); - case SPARSE_EMBEDDING -> listener.onResponse(makeResults(input)); + case ANY, SPARSE_EMBEDDING -> listener.onResponse(makeResults(input)); default -> listener.onFailure( new ElasticsearchStatusException( TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index b9cc14977b87e..fb3974fc12e8b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -92,6 +92,7 @@ private void inferOnService( model, request.getInput(), request.getTaskSettings(), + request.getInputType(), listener.delegateFailureAndWrap((l, inferenceResults) -> l.onResponse(new InferenceAction.Response(inferenceResults))) ); } diff 
--git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java index 8c9d70f0a7323..0fb5ca9283fae 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.external.action.cohere; +import org.elasticsearch.inference.InputType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -28,8 +29,8 @@ public CohereActionCreator(Sender sender, ServiceComponents serviceComponents) { } @Override - public ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings) { - var overriddenModel = model.overrideWith(taskSettings); + public ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings, InputType inputType) { + var overriddenModel = CohereEmbeddingsModel.of(model, taskSettings, inputType); return new CohereEmbeddingsAction(sender, overriddenModel, serviceComponents); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java index 1500d48e3c201..cc732e7ab8dc5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java @@ -7,11 +7,12 @@ package 
org.elasticsearch.xpack.inference.external.action.cohere; +import org.elasticsearch.inference.InputType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; import java.util.Map; public interface CohereActionVisitor { - ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings); + ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings, InputType inputType); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java index 6c423760d0b35..94583c634fb26 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java @@ -29,7 +29,7 @@ public OpenAiActionCreator(Sender sender, ServiceComponents serviceComponents) { @Override public ExecutableAction create(OpenAiEmbeddingsModel model, Map taskSettings) { - var overriddenModel = model.overrideWith(taskSettings); + var overriddenModel = OpenAiEmbeddingsModel.of(model, taskSettings); return new OpenAiEmbeddingsAction(sender, overriddenModel, serviceComponents); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java index 8cacbd0f16aaf..30427aaa35869 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java @@ -62,6 +62,7 @@ public HttpRequest createHttpRequest() { httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); httpPost.setHeader(createAuthBearerHeader(account.apiKey())); + httpPost.setHeader(CohereUtils.createRequestSourceHeader()); return new HttpRequest(httpPost, getInferenceEntityId()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java index a0b5444ee45e4..9e34af5ed6385 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -20,6 +20,8 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings.invalidInputTypeMessage; + public record CohereEmbeddingsRequestEntity( List input, CohereEmbeddingsTaskSettings taskSettings, @@ -29,14 +31,6 @@ public record CohereEmbeddingsRequestEntity( private static final String SEARCH_DOCUMENT = "search_document"; private static final String SEARCH_QUERY = "search_query"; - /** - * Maps the {@link InputType} to the expected value for cohere for the input_type field in the request using the enum's ordinal. 
- * The order of these entries is important and needs to match the order in the enum - */ - private static final String[] INPUT_TYPE_MAPPING = { SEARCH_DOCUMENT, SEARCH_QUERY }; - static { - assert INPUT_TYPE_MAPPING.length == InputType.values().length : "input type mapping was incorrectly defined"; - } private static final String TEXTS_FIELD = "texts"; @@ -56,23 +50,31 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CohereServiceSettings.MODEL, model); } - if (taskSettings.inputType() != null) { - builder.field(INPUT_TYPE_FIELD, covertToString(taskSettings.inputType())); + if (taskSettings.getInputType() != null) { + builder.field(INPUT_TYPE_FIELD, covertToString(taskSettings.getInputType())); } if (embeddingType != null) { builder.field(EMBEDDING_TYPES_FIELD, List.of(embeddingType)); } - if (taskSettings.truncation() != null) { - builder.field(CohereServiceFields.TRUNCATE, taskSettings.truncation()); + if (taskSettings.getTruncation() != null) { + builder.field(CohereServiceFields.TRUNCATE, taskSettings.getTruncation()); } builder.endObject(); return builder; } - private static String covertToString(InputType inputType) { - return INPUT_TYPE_MAPPING[inputType.ordinal()]; + // default for testing + static String covertToString(InputType inputType) { + return switch (inputType) { + case INGEST -> SEARCH_DOCUMENT; + case SEARCH -> SEARCH_QUERY; + default -> { + assert false : invalidInputTypeMessage(inputType); + yield null; + } + }; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java index f8ccd91d4e3d2..e54328df1dbf7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java @@ -7,10 +7,19 @@ package org.elasticsearch.xpack.inference.external.request.cohere; +import org.apache.http.Header; +import org.apache.http.message.BasicHeader; + public class CohereUtils { public static final String HOST = "api.cohere.ai"; public static final String VERSION_1 = "v1"; public static final String EMBEDDINGS_PATH = "embed"; + public static final String REQUEST_SOURCE_HEADER = "Request-Source"; + public static final String ELASTIC_REQUEST_SOURCE = "unspecified:elasticsearch"; + + public static Header createRequestSourceHeader() { + return new BasicHeader(REQUEST_SOURCE_HEADER, ELASTIC_REQUEST_SOURCE); + } private CohereUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index bb45e8fd684a6..0c40863b37db2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.external.http.sender.Sender; @@ -41,16 +42,23 @@ protected ServiceComponents getServiceComponents() { } @Override - public void infer(Model model, List input, Map taskSettings, ActionListener listener) { + public void infer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { init(); - doInfer(model, input, 
taskSettings, listener); + doInfer(model, input, taskSettings, inputType, listener); } protected abstract void doInfer( Model model, List input, Map taskSettings, + InputType inputType, ActionListener listener ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index c218a0ff12c22..7637bd9740670 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -11,10 +11,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Strings; import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; @@ -24,7 +24,7 @@ import java.net.URI; import java.net.URISyntaxException; -import java.util.Arrays; +import java.util.EnumSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -110,7 +110,7 @@ public static String mustBeNonEmptyString(String settingName, String scope) { return Strings.format("[%s] Invalid value empty string. [%s] must be a non-empty string", scope, settingName); } - public static String invalidValue(String settingName, String scope, String invalidType, String... requiredTypes) { + public static String invalidValue(String settingName, String scope, String invalidType, String[] requiredTypes) { return Strings.format( "[%s] Invalid value [%s] received. 
[%s] must be one of [%s]", scope, @@ -221,12 +221,12 @@ public static String extractOptionalString( return optionalField; } - public static T extractOptionalEnum( + public static > E extractOptionalEnum( Map map, String settingName, String scope, - CheckedFunction converter, - T[] validTypes, + EnumConstructor constructor, + EnumSet validValues, ValidationException validationException ) { var enumString = extractOptionalString(map, settingName, scope, validationException); @@ -234,16 +234,34 @@ public static T extractOptionalEnum( return null; } - var validTypesAsStrings = Arrays.stream(validTypes).map(type -> type.toString().toLowerCase(Locale.ROOT)).toArray(String[]::new); + var validValuesAsStrings = validValues.stream().map(value -> value.toString().toLowerCase(Locale.ROOT)).toArray(String[]::new); try { - return converter.apply(enumString); + var createdEnum = constructor.apply(enumString); + validateEnumValue(createdEnum, validValues); + + return createdEnum; } catch (IllegalArgumentException e) { - validationException.addValidationError(invalidValue(settingName, scope, enumString, validTypesAsStrings)); + validationException.addValidationError(invalidValue(settingName, scope, enumString, validValuesAsStrings)); } return null; } + private static > void validateEnumValue(E enumValue, EnumSet validValues) { + if (validValues.contains(enumValue) == false) { + throw new IllegalArgumentException(Strings.format("Enum value [%s] is not one of the acceptable values", enumValue.toString())); + } + } + + /** + * Functional interface for creating an enum from a string. 
+ * @param + */ + @FunctionalInterface + public interface EnumConstructor> { + E apply(String name) throws IllegalArgumentException; + } + public static String parsePersistedConfigErrorMsg(String inferenceEntityId, String serviceName) { return format( "Failed to parse stored model [%s] for [%s] service, please delete and add the service again", @@ -272,7 +290,7 @@ public static ElasticsearchStatusException createInvalidModelException(Model mod public static void getEmbeddingSize(Model model, InferenceService service, ActionListener listener) { assert model.getTaskType() == TaskType.TEXT_EMBEDDING; - service.infer(model, List.of(TEST_EMBEDDING_INPUT), Map.of(), listener.delegateFailureAndWrap((delegate, r) -> { + service.infer(model, List.of(TEST_EMBEDDING_INPUT), Map.of(), InputType.INGEST, listener.delegateFailureAndWrap((delegate, r) -> { if (r instanceof TextEmbedding embeddingResults) { try { delegate.onResponse(embeddingResults.getFirstEmbeddingSize()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java index 1b4843e441248..81a27e1e536f3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.cohere; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -30,5 +31,5 @@ protected CohereModel(CohereModel model, ServiceSettings serviceSettings) { super(model, serviceSettings); } - public abstract ExecutableAction accept(CohereActionVisitor creator, Map taskSettings); + public abstract ExecutableAction 
accept(CohereActionVisitor creator, Map taskSettings, InputType inputType); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 8783f12852ec8..3f608c977f686 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -123,6 +124,7 @@ public void doInfer( Model model, List input, Map taskSettings, + InputType inputType, ActionListener listener ) { if (model instanceof CohereModel == false) { @@ -133,7 +135,7 @@ public void doInfer( CohereModel cohereModel = (CohereModel) model; var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); - var action = cohereModel.accept(actionCreator, taskSettings); + var action = cohereModel.accept(actionCreator, taskSettings, inputType); action.execute(input, listener); } @@ -174,6 +176,6 @@ private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsMo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java index c92700e87cd96..a3afdc306b217 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.cohere.embeddings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -19,6 +20,11 @@ import java.util.Map; public class CohereEmbeddingsModel extends CohereModel { + public static CohereEmbeddingsModel of(CohereEmbeddingsModel model, Map taskSettings, InputType inputType) { + var requestTaskSettings = CohereEmbeddingsTaskSettings.fromMap(taskSettings); + return new CohereEmbeddingsModel(model, CohereEmbeddingsTaskSettings.of(model.getTaskSettings(), requestTaskSettings, inputType)); + } + public CohereEmbeddingsModel( String modelId, TaskType taskType, @@ -73,16 +79,7 @@ public DefaultSecretSettings getSecretSettings() { } @Override - public ExecutableAction accept(CohereActionVisitor visitor, Map taskSettings) { - return visitor.create(this, taskSettings); - } - - public CohereEmbeddingsModel overrideWith(Map taskSettings) { - if (taskSettings == null || taskSettings.isEmpty()) { - return this; - } - - var requestTaskSettings = CohereEmbeddingsTaskSettings.fromMap(taskSettings); - return new CohereEmbeddingsModel(this, getTaskSettings().overrideWith(requestTaskSettings)); + public ExecutableAction accept(CohereActionVisitor visitor, Map taskSettings, InputType inputType) { + return visitor.create(this, taskSettings, inputType); } } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 5327bcbcf22dd..916e7fadcc8fb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; import java.io.IOException; +import java.util.EnumSet; import java.util.Map; import java.util.Objects; @@ -37,7 +38,7 @@ public static CohereEmbeddingsServiceSettings fromMap(Map map) { EMBEDDING_TYPE, ModelConfigurations.SERVICE_SETTINGS, CohereEmbeddingType::fromString, - CohereEmbeddingType.values(), + EnumSet.allOf(CohereEmbeddingType.class), validationException ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java index 858efdb0d1ace..b294350580a2e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -20,7 +21,9 @@ 
import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; import java.io.IOException; +import java.util.EnumSet; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; import static org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields.TRUNCATE; @@ -31,18 +34,16 @@ *

* See api docs for details. *

- * - * @param inputType Specifies the type of input you're giving to the model - * @param truncation Specifies how the API will handle inputs longer than the maximum token length */ -public record CohereEmbeddingsTaskSettings(@Nullable InputType inputType, @Nullable CohereTruncation truncation) implements TaskSettings { +public class CohereEmbeddingsTaskSettings implements TaskSettings { public static final String NAME = "cohere_embeddings_task_settings"; public static final CohereEmbeddingsTaskSettings EMPTY_SETTINGS = new CohereEmbeddingsTaskSettings(null, null); static final String INPUT_TYPE = "input_type"; + private static final EnumSet VALID_REQUEST_VALUES2 = EnumSet.of(InputType.INGEST, InputType.SEARCH); public static CohereEmbeddingsTaskSettings fromMap(Map map) { - if (map.isEmpty()) { + if (map == null || map.isEmpty()) { return EMPTY_SETTINGS; } @@ -53,7 +54,7 @@ public static CohereEmbeddingsTaskSettings fromMap(Map map) { INPUT_TYPE, ModelConfigurations.TASK_SETTINGS, InputType::fromString, - InputType.values(), + VALID_REQUEST_VALUES2, validationException ); CohereTruncation truncation = extractOptionalEnum( @@ -61,7 +62,7 @@ public static CohereEmbeddingsTaskSettings fromMap(Map map) { TRUNCATE, ModelConfigurations.TASK_SETTINGS, CohereTruncation::fromString, - CohereTruncation.values(), + EnumSet.allOf(CohereTruncation.class), validationException ); @@ -72,10 +73,73 @@ public static CohereEmbeddingsTaskSettings fromMap(Map map) { return new CohereEmbeddingsTaskSettings(inputType, truncation); } + /** + * Creates a new {@link CohereEmbeddingsTaskSettings} by preferring non-null fields from the provided parameters. + * For the input type, preference is given to requestInputType if it is not null and not UNSPECIFIED. + * Then preference is given to the requestTaskSettings and finally to originalSettings even if the value is null. 
+ * + * Similarly, for the truncation field preference is given to requestTaskSettings if it is not null and then to + * originalSettings. + * @param originalSettings the settings stored as part of the inference entity configuration + * @param requestTaskSettings the settings passed in within the task_settings field of the request + * @param requestInputType the input type passed in the request parameters + * @return a constructed {@link CohereEmbeddingsTaskSettings} + */ + public static CohereEmbeddingsTaskSettings of( + CohereEmbeddingsTaskSettings originalSettings, + CohereEmbeddingsTaskSettings requestTaskSettings, + InputType requestInputType + ) { + var inputTypeToUse = getValidInputType(originalSettings, requestTaskSettings, requestInputType); + var truncationToUse = getValidTruncation(originalSettings, requestTaskSettings); + + return new CohereEmbeddingsTaskSettings(inputTypeToUse, truncationToUse); + } + + private static InputType getValidInputType( + CohereEmbeddingsTaskSettings originalSettings, + CohereEmbeddingsTaskSettings requestTaskSettings, + InputType requestInputType + ) { + InputType inputTypeToUse = originalSettings.inputType; + + if (VALID_REQUEST_VALUES2.contains(requestInputType)) { + inputTypeToUse = requestInputType; + } else if (requestTaskSettings.inputType != null) { + inputTypeToUse = requestTaskSettings.inputType; + } + + return inputTypeToUse; + } + + private static CohereTruncation getValidTruncation( + CohereEmbeddingsTaskSettings originalSettings, + CohereEmbeddingsTaskSettings requestTaskSettings + ) { + return requestTaskSettings.getTruncation() == null ? 
originalSettings.truncation : requestTaskSettings.getTruncation(); + } + + private final InputType inputType; + private final CohereTruncation truncation; + public CohereEmbeddingsTaskSettings(StreamInput in) throws IOException { this(in.readOptionalEnum(InputType.class), in.readOptionalEnum(CohereTruncation.class)); } + public CohereEmbeddingsTaskSettings(@Nullable InputType inputType, @Nullable CohereTruncation truncation) { + validateInputType(inputType); + this.inputType = inputType; + this.truncation = truncation; + } + + private static void validateInputType(InputType inputType) { + if (inputType == null) { + return; + } + + assert VALID_REQUEST_VALUES2.contains(inputType) : invalidInputTypeMessage(inputType); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -90,6 +154,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public InputType getInputType() { + return inputType; + } + + public CohereTruncation getTruncation() { + return truncation; + } + @Override public String getWriteableName() { return NAME; @@ -106,10 +178,20 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalEnum(truncation); } - public CohereEmbeddingsTaskSettings overrideWith(CohereEmbeddingsTaskSettings requestTaskSettings) { - var inputTypeToUse = requestTaskSettings.inputType() == null ? inputType : requestTaskSettings.inputType(); - var truncationToUse = requestTaskSettings.truncation() == null ? 
truncation : requestTaskSettings.truncation(); + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereEmbeddingsTaskSettings that = (CohereEmbeddingsTaskSettings) o; + return Objects.equals(inputType, that.inputType) && Objects.equals(truncation, that.truncation); + } - return new CohereEmbeddingsTaskSettings(inputTypeToUse, truncationToUse); + @Override + public int hashCode() { + return Objects.hash(inputType, truncation); + } + + public static String invalidInputTypeMessage(InputType inputType) { + return Strings.format("received invalid input type value [%s]", inputType.toString()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index 12bdcd3f20614..1d0bd123c69f3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -19,6 +19,7 @@ import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; @@ -210,7 +211,13 @@ public void stop(String inferenceEntityId, ActionListener listener) { } @Override - public void infer(Model model, List input, Map taskSettings, ActionListener listener) { + public void infer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { // No task settings to override with requestTaskSettings if 
(TaskType.SPARSE_EMBEDDING.isAnyOrSame(model.getConfigurations().getTaskType()) == false) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java index ef93cdd57b756..dcaa760868c49 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -96,6 +97,7 @@ public void doInfer( Model model, List input, Map taskSettings, + InputType inputType, ActionListener listener ) { if (model instanceof HuggingFaceModel == false) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 9b5283ef4f803..594d7cf2cf31c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; 
import org.elasticsearch.inference.ModelSecrets; @@ -136,6 +137,7 @@ public void doInfer( Model model, List input, Map taskSettings, + InputType inputType, ActionListener listener ) { if (model instanceof OpenAiModel == false) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 98b0161665d8e..74d97099bbb76 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -21,6 +21,15 @@ public class OpenAiEmbeddingsModel extends OpenAiModel { + public static OpenAiEmbeddingsModel of(OpenAiEmbeddingsModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(taskSettings); + return new OpenAiEmbeddingsModel(model, OpenAiEmbeddingsTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + public OpenAiEmbeddingsModel( String inferenceEntityId, TaskType taskType, @@ -78,13 +87,4 @@ public DefaultSecretSettings getSecretSettings() { public ExecutableAction accept(OpenAiActionVisitor creator, Map taskSettings) { return creator.create(this, taskSettings); } - - public OpenAiEmbeddingsModel overrideWith(Map taskSettings) { - if (taskSettings == null || taskSettings.isEmpty()) { - return this; - } - - var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(taskSettings); - return new OpenAiEmbeddingsModel(this, getTaskSettings().overrideWith(requestTaskSettings)); - } } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java index 45a9ce1cabbc3..c6f3179a4f088 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java @@ -50,6 +50,23 @@ public static OpenAiEmbeddingsTaskSettings fromMap(Map map) { return new OpenAiEmbeddingsTaskSettings(model, user); } + /** + * Creates a new {@link OpenAiEmbeddingsTaskSettings} object by overriding the values in originalSettings with the ones + * passed in via requestSettings if the fields are not null. + * @param originalSettings the original task settings from the inference entity configuration from storage + * @param requestSettings the task settings from the request + * @return a new {@link OpenAiEmbeddingsTaskSettings} + */ + public static OpenAiEmbeddingsTaskSettings of( + OpenAiEmbeddingsTaskSettings originalSettings, + OpenAiEmbeddingsRequestTaskSettings requestSettings + ) { + var modelToUse = requestSettings.model() == null ? originalSettings.model : requestSettings.model(); + var userToUse = requestSettings.user() == null ? originalSettings.user : requestSettings.user(); + + return new OpenAiEmbeddingsTaskSettings(modelToUse, userToUse); + } + public OpenAiEmbeddingsTaskSettings { Objects.requireNonNull(model); } @@ -84,11 +101,4 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(model); out.writeOptionalString(user); } - - public OpenAiEmbeddingsTaskSettings overrideWith(OpenAiEmbeddingsRequestTaskSettings requestSettings) { - var modelToUse = requestSettings.model() == null ? 
model : requestSettings.model(); - var userToUse = requestSettings.user() == null ? user : requestSettings.user(); - - return new OpenAiEmbeddingsTaskSettings(modelToUse, userToUse); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InputTypeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InputTypeTests.java new file mode 100644 index 0000000000000..088f93507d35f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InputTypeTests.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.inference.InputType; +import org.elasticsearch.test.ESTestCase; + +public class InputTypeTests extends ESTestCase { + public static InputType randomWithoutUnspecified() { + return randomFrom(InputType.INGEST, InputType.SEARCH); + } + + public static InputType[] valuesWithoutUnspecified() { + return new InputType[] { InputType.INGEST, InputType.SEARCH }; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java index 4f7ae9436418f..396af55ce5616 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java @@ -7,22 +7,26 @@ package org.elasticsearch.xpack.inference.action; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; 
import org.elasticsearch.core.Tuple; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.is; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; -public class InferenceActionRequestTests extends AbstractWireSerializingTestCase { +public class InferenceActionRequestTests extends AbstractBWCWireSerializationTestCase { @Override protected Writeable.Reader instanceReader() { @@ -70,7 +74,7 @@ public void testParseRequest_DefaultsInputTypeToIngest() throws IOException { """; try (var parser = createParser(JsonXContent.jsonXContent, singleInputRequest)) { var request = InferenceAction.Request.parseRequest("model_id", "sparse_embedding", parser); - assertThat(request.getInputType(), is(InputType.INGEST)); + assertThat(request.getInputType(), is(InputType.UNSPECIFIED)); } } @@ -135,4 +139,76 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc default -> throw new UnsupportedOperationException(); }; } + + @Override + protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Request instance, TransportVersion version) { + if (version.before(TransportVersions.INFERENCE_MULTIPLE_INPUTS)) { + return new InferenceAction.Request( + instance.getTaskType(), + instance.getInferenceEntityId(), + instance.getInput().subList(0, 1), + instance.getTaskSettings(), + InputType.UNSPECIFIED + ); + } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + return new InferenceAction.Request( + instance.getTaskType(), + 
instance.getInferenceEntityId(), + instance.getInput(), + instance.getTaskSettings(), + InputType.UNSPECIFIED + ); + } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED) + && instance.getInputType() == InputType.UNSPECIFIED) { + return new InferenceAction.Request( + instance.getTaskType(), + instance.getInferenceEntityId(), + instance.getInput(), + instance.getTaskSettings(), + InputType.INGEST + ); + } + + return instance; + } + + public void testWriteTo_WhenVersionIsOnAfterUnspecifiedAdded() throws IOException { + assertBwcSerialization( + new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED), + TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED + ); + } + + public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest() throws IOException { + assertBwcSerialization( + new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED), + TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED + ); + } + + public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_ManualCheck() throws IOException { + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED); + + InferenceAction.Request deserializedInstance = copyWriteable( + instance, + getNamedWriteableRegistry(), + instanceReader(), + TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED + ); + + assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); + } + + public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUnspecified() throws IOException { + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.INGEST); + + InferenceAction.Request deserializedInstance = copyWriteable( + instance, + getNamedWriteableRegistry(), + 
instanceReader(), + TransportVersions.HOT_THREADS_AS_BYTES + ); + + assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java index 67a95265f093d..e7cfc784db117 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java @@ -110,7 +110,7 @@ public void testCreate_CohereEmbeddingsModel() throws IOException { ); var actionCreator = new CohereActionCreator(sender, createWithEmptySettings(threadPool)); var overriddenTaskSettings = CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(InputType.SEARCH, CohereTruncation.END); - var action = actionCreator.create(model, overriddenTaskSettings); + var action = actionCreator.create(model, overriddenTaskSettings, InputType.UNSPECIFIED); PlainActionFuture listener = new PlainActionFuture<>(); action.execute(List.of("abc"), listener); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java index 501d5a5e42bfe..7fd33f7bba58f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.inference.external.http.HttpResult; import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.cohere.CohereUtils; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.results.TextEmbeddingByteResultsTests; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; @@ -130,6 +131,10 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { equalTo(XContentType.JSON.mediaType()) ); MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(CohereUtils.REQUEST_SOURCE_HEADER), + equalTo(CohereUtils.ELASTIC_REQUEST_SOURCE) + ); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); MatcherAssert.assertThat( @@ -210,6 +215,10 @@ public void testExecute_ReturnsSuccessfulResponse_ForInt8ResponseType() throws I equalTo(XContentType.JSON.mediaType()) ); MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(CohereUtils.REQUEST_SOURCE_HEADER), + equalTo(CohereUtils.ELASTIC_REQUEST_SOURCE) + ); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); MatcherAssert.assertThat( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java index 8ef9ea4b0316b..2d3ff25222ab9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java @@ -66,4 +66,9 @@ public void testXContent_WritesNoOptionalFields_WhenTheyAreNotDefined() throws I MatcherAssert.assertThat(xContentResult, is(""" {"texts":["abc"]}""")); } + + public void testConvertToString_ThrowsAssertionFailure_WhenInputTypeIsUnspecified() { + var thrownException = expectThrows(AssertionError.class, () -> CohereEmbeddingsRequestEntity.covertToString(InputType.UNSPECIFIED)); + MatcherAssert.assertThat(thrownException.getMessage(), is("received invalid input type value [unspecified]")); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java index df61417ffff9c..d3783f6fed76b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java @@ -44,6 +44,10 @@ public void testCreateRequest_UrlDefined() throws URISyntaxException, IOExceptio MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + MatcherAssert.assertThat( + httpPost.getLastHeader(CohereUtils.REQUEST_SOURCE_HEADER).getValue(), + is(CohereUtils.ELASTIC_REQUEST_SOURCE) + ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc")))); @@ -71,6 +75,10 @@ public void testCreateRequest_AllOptionsDefined() 
throws URISyntaxException, IOE MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + MatcherAssert.assertThat( + httpPost.getLastHeader(CohereUtils.REQUEST_SOURCE_HEADER).getValue(), + is(CohereUtils.ELASTIC_REQUEST_SOURCE) + ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); MatcherAssert.assertThat( @@ -114,6 +122,10 @@ public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() th MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + MatcherAssert.assertThat( + httpPost.getLastHeader(CohereUtils.REQUEST_SOURCE_HEADER).getValue(), + is(CohereUtils.ELASTIC_REQUEST_SOURCE) + ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); MatcherAssert.assertThat( @@ -157,6 +169,10 @@ public void testCreateRequest_TruncateNone() throws URISyntaxException, IOExcept MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + MatcherAssert.assertThat( + httpPost.getLastHeader(CohereUtils.REQUEST_SOURCE_HEADER).getValue(), + is(CohereUtils.ELASTIC_REQUEST_SOURCE) + ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "truncate", "none"))); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index 31d7667fa6665..8b596aa5cf0c8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -105,6 +106,7 @@ protected void doInfer( Model model, List input, Map taskSettings, + InputType inputType, ActionListener listener ) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index b935c5a8c64b3..689c9f9b08a2b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.inference.results.TextEmbeddingByteResultsTests; import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -261,7 +262,7 @@ public void testExtractOptionalString_AddsException_WhenFieldIsEmpty() { public void testExtractOptionalEnum_ReturnsNull_WhenFieldDoesNotExist() { var validation = new ValidationException(); Map map = modifiableMap(Map.of("key", "value")); - var 
createdEnum = extractOptionalEnum(map, "abc", "scope", InputType::fromString, InputType.values(), validation); + var createdEnum = extractOptionalEnum(map, "abc", "scope", InputType::fromString, EnumSet.allOf(InputType.class), validation); assertNull(createdEnum); assertTrue(validation.validationErrors().isEmpty()); @@ -271,7 +272,14 @@ public void testExtractOptionalEnum_ReturnsNull_WhenFieldDoesNotExist() { public void testExtractOptionalEnum_ReturnsNullAndAddsException_WhenAnInvalidValueExists() { var validation = new ValidationException(); Map map = modifiableMap(Map.of("key", "invalid_value")); - var createdEnum = extractOptionalEnum(map, "key", "scope", InputType::fromString, InputType.values(), validation); + var createdEnum = extractOptionalEnum( + map, + "key", + "scope", + InputType::fromString, + EnumSet.of(InputType.INGEST, InputType.SEARCH), + validation + ); assertNull(createdEnum); assertFalse(validation.validationErrors().isEmpty()); @@ -282,6 +290,27 @@ public void testExtractOptionalEnum_ReturnsNullAndAddsException_WhenAnInvalidVal ); } + public void testExtractOptionalEnum_ReturnsNullAndAddsException_WhenValueIsNotPartOfTheAcceptableValues() { + var validation = new ValidationException(); + Map map = modifiableMap(Map.of("key", InputType.UNSPECIFIED.toString())); + var createdEnum = extractOptionalEnum(map, "key", "scope", InputType::fromString, EnumSet.of(InputType.INGEST), validation); + + assertNull(createdEnum); + assertFalse(validation.validationErrors().isEmpty()); + assertTrue(map.isEmpty()); + assertThat(validation.validationErrors().get(0), is("[scope] Invalid value [unspecified] received. 
[key] must be one of [ingest]")); + } + + public void testExtractOptionalEnum_ReturnsIngest_WhenValueIsAcceptable() { + var validation = new ValidationException(); + Map map = modifiableMap(Map.of("key", InputType.INGEST.toString())); + var createdEnum = extractOptionalEnum(map, "key", "scope", InputType::fromString, EnumSet.of(InputType.INGEST), validation); + + assertThat(createdEnum, is(InputType.INGEST)); + assertTrue(validation.validationErrors().isEmpty()); + assertTrue(map.isEmpty()); + } + public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingResults_IsEmpty() { var service = mock(InferenceService.class); @@ -290,11 +319,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingResults_IsEmpty() doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[3]; + ActionListener listener = (ActionListener) invocation.getArguments()[4]; listener.onResponse(new TextEmbeddingResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -313,11 +342,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingByteResults_IsEmp doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[3]; + ActionListener listener = (ActionListener) invocation.getArguments()[4]; listener.onResponse(new TextEmbeddingByteResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -338,11 +367,11 @@ public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingResults() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - 
ActionListener listener = (ActionListener) invocation.getArguments()[3]; + ActionListener listener = (ActionListener) invocation.getArguments()[4]; listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -362,11 +391,11 @@ public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingByteResults() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[3]; + ActionListener listener = (ActionListener) invocation.getArguments()[4]; listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 0250e08a48452..7daad207f9068 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModelTests; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettingsTests; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; 
import org.junit.After; @@ -686,7 +687,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException try (var service = new CohereService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), listener); + service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); MatcherAssert.assertThat( @@ -745,7 +746,7 @@ public void testInfer_SendsRequest() throws IOException { null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), listener); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -848,7 +849,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), listener); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); MatcherAssert.assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); @@ -857,6 +858,193 @@ public void testInfer_UnauthorisedResponse() throws IOException { } } + public void testInfer_SetsInputTypeToIngest_FromInferParameter_WhenTaskSettingsAreEmpty() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new CohereService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + 
"embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 1024, + 1024, + "model", + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat(result.asMap(), Matchers.is(buildExpectation(List.of(List.of(0.123F, -0.123F))))); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaType()) + ); + MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + } + } + + public void testInfer_SetsInputTypeToIngestFromInferParameter_WhenModelSettingIsNull_AndRequestTaskSettingsIsSearch() + throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new CohereService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + 
"billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(null, null), + 1024, + 1024, + "model", + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + model, + List.of("abc"), + CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(InputType.SEARCH, null), + InputType.INGEST, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat(result.asMap(), Matchers.is(buildExpectation(List.of(List.of(0.123F, -0.123F))))); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaType()) + ); + MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + } + } + + public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspecifiedIsPassedInRequest() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new CohereService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + 
"response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(null, null), + 1024, + 1024, + "model", + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.UNSPECIFIED, listener); + + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat(result.asMap(), Matchers.is(buildExpectation(List.of(List.of(0.123F, -0.123F))))); + MatcherAssert.assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + MatcherAssert.assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaType()) + ); + MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model"))); + } + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java index 1961d6b168d54..5570731dbe8d9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java @@ -21,12 +21,36 @@ import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap; import 
static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.sameInstance; public class CohereEmbeddingsModelTests extends ESTestCase { - public void testOverrideWith_OverridesInputType_WithSearch() { + public void testOverrideWith_DoesNotOverrideAndModelRemainsEqual_WhenSettingsAreEmpty_AndInputTypeIsInvalid() { + var model = createModel("url", "api_key", null, null, null); + + var overriddenModel = CohereEmbeddingsModel.of(model, Map.of(), InputType.UNSPECIFIED); + MatcherAssert.assertThat(overriddenModel, is(model)); + } + + public void testOverrideWith_DoesNotOverrideAndModelRemainsEqual_WhenSettingsAreNull_AndInputTypeIsInvalid() { + var model = createModel("url", "api_key", null, null, null); + + var overriddenModel = CohereEmbeddingsModel.of(model, null, InputType.UNSPECIFIED); + MatcherAssert.assertThat(overriddenModel, is(model)); + } + + public void testOverrideWith_SetsInputTypeToIngest_WhenTheFieldIsNullInModelTaskSettings_AndNullInRequestTaskSettings() { var model = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(null, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + + var overriddenModel = CohereEmbeddingsModel.of(model, getTaskSettingsMap(null, null), InputType.INGEST); + var expectedModel = createModel( "url", "api_key", new CohereEmbeddingsTaskSettings(InputType.INGEST, null), @@ -35,8 +59,21 @@ public void testOverrideWith_OverridesInputType_WithSearch() { "model", CohereEmbeddingType.FLOAT ); + MatcherAssert.assertThat(overriddenModel, is(expectedModel)); + } - var overriddenModel = model.overrideWith(getTaskSettingsMap(InputType.SEARCH, null)); + public void testOverrideWith_SetsInputType_FromRequest_IfValid_OverridingStoredTaskSettings() { + var model = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.INGEST, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + + var overriddenModel = CohereEmbeddingsModel.of(model, getTaskSettingsMap(null, null), 
InputType.SEARCH); var expectedModel = createModel( "url", "api_key", @@ -49,18 +86,100 @@ public void testOverrideWith_OverridesInputType_WithSearch() { MatcherAssert.assertThat(overriddenModel, is(expectedModel)); } - public void testOverrideWith_DoesNotOverride_WhenSettingsAreEmpty() { - var model = createModel("url", "api_key", null, null, null); + public void testOverrideWith_SetsInputType_FromRequest_IfValid_OverridingRequestTaskSettings() { + var model = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(null, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); - var overriddenModel = model.overrideWith(Map.of()); - MatcherAssert.assertThat(overriddenModel, sameInstance(model)); + var overriddenModel = CohereEmbeddingsModel.of(model, getTaskSettingsMap(InputType.INGEST, null), InputType.SEARCH); + var expectedModel = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.SEARCH, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + MatcherAssert.assertThat(overriddenModel, is(expectedModel)); } - public void testOverrideWith_DoesNotOverride_WhenSettingsAreNull() { - var model = createModel("url", "api_key", null, null, null); + public void testOverrideWith_OverridesInputType_WithRequestTaskSettingsSearch_WhenRequestInputTypeIsInvalid() { + var model = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.INGEST, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + + var overriddenModel = CohereEmbeddingsModel.of(model, getTaskSettingsMap(InputType.SEARCH, null), InputType.UNSPECIFIED); + var expectedModel = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.SEARCH, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + MatcherAssert.assertThat(overriddenModel, is(expectedModel)); + } + + public void testOverrideWith_DoesNotSetInputType_FromRequest_IfInputTypeIsInvalid() { + var model = createModel( + 
"url", + "api_key", + new CohereEmbeddingsTaskSettings(null, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); - var overriddenModel = model.overrideWith(null); - MatcherAssert.assertThat(overriddenModel, sameInstance(model)); + var overriddenModel = CohereEmbeddingsModel.of(model, getTaskSettingsMap(null, null), InputType.UNSPECIFIED); + var expectedModel = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(null, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + MatcherAssert.assertThat(overriddenModel, is(expectedModel)); + } + + public void testOverrideWith_DoesNotSetInputType_WhenRequestTaskSettingsIsNull_AndRequestInputTypeIsInvalid() { + var model = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.INGEST, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + + var overriddenModel = CohereEmbeddingsModel.of(model, getTaskSettingsMap(null, null), InputType.UNSPECIFIED); + var expectedModel = createModel( + "url", + "api_key", + new CohereEmbeddingsTaskSettings(InputType.INGEST, null), + null, + null, + "model", + CohereEmbeddingType.FLOAT + ); + MatcherAssert.assertThat(overriddenModel, is(expectedModel)); } public static CohereEmbeddingsModel createModel( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java index 164d3998f138f..77e3280d18f93 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java @@ -13,20 +13,21 @@ import org.elasticsearch.inference.InputType; import 
org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields; -import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; +import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.xpack.inference.InputTypeTests.randomWithoutUnspecified; import static org.hamcrest.Matchers.is; public class CohereEmbeddingsTaskSettingsTests extends AbstractWireSerializingTestCase { public static CohereEmbeddingsTaskSettings createRandom() { - var inputType = randomBoolean() ? randomFrom(InputType.values()) : null; + var inputType = randomBoolean() ? randomWithoutUnspecified() : null; var truncation = randomBoolean() ? randomFrom(CohereTruncation.values()) : null; return new CohereEmbeddingsTaskSettings(inputType, truncation); @@ -39,6 +40,10 @@ public void testFromMap_CreatesEmptySettings_WhenAllFieldsAreNull() { ); } + public void testFromMap_CreatesEmptySettings_WhenMapIsNull() { + MatcherAssert.assertThat(CohereEmbeddingsTaskSettings.fromMap(null), is(new CohereEmbeddingsTaskSettings(null, null))); + } + public void testFromMap_CreatesSettings_WhenAllFieldsOfSettingsArePresent() { MatcherAssert.assertThat( CohereEmbeddingsTaskSettings.fromMap( @@ -67,26 +72,55 @@ public void testFromMap_ReturnsFailure_WhenInputTypeIsInvalid() { ); } - public void testOverrideWith_KeepsOriginalValuesWhenOverridesAreNull() { - var taskSettings = CohereEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(CohereServiceSettings.MODEL, "model", CohereServiceFields.TRUNCATE, CohereTruncation.END.toString())) + public void testFromMap_ReturnsFailure_WhenInputTypeIsUnspecified() { + var exception = expectThrows( + ValidationException.class, + () -> CohereEmbeddingsTaskSettings.fromMap( + new 
HashMap<>(Map.of(CohereEmbeddingsTaskSettings.INPUT_TYPE, InputType.UNSPECIFIED.toString())) + ) + ); + + MatcherAssert.assertThat( + exception.getMessage(), + is("Validation Failed: 1: [task_settings] Invalid value [unspecified] received. [input_type] must be one of [ingest, search];") ); + } + + public void testXContent_ThrowsAssertionFailure_WhenInputTypeIsUnspecified() { + var thrownException = expectThrows(AssertionError.class, () -> new CohereEmbeddingsTaskSettings(InputType.UNSPECIFIED, null)); + MatcherAssert.assertThat(thrownException.getMessage(), CoreMatchers.is("received invalid input type value [unspecified]")); + } - var overriddenTaskSettings = taskSettings.overrideWith(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS); + public void testOf_KeepsOriginalValuesWhenRequestSettingsAreNull_AndRequestInputTypeIsInvalid() { + var taskSettings = new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.NONE); + var overriddenTaskSettings = CohereEmbeddingsTaskSettings.of( + taskSettings, + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + InputType.UNSPECIFIED + ); MatcherAssert.assertThat(overriddenTaskSettings, is(taskSettings)); } - public void testOverrideWith_UsesOverriddenSettings() { - var taskSettings = CohereEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(CohereServiceFields.TRUNCATE, CohereTruncation.END.toString())) + public void testOf_UsesRequestTaskSettings() { + var taskSettings = new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE); + var overriddenTaskSettings = CohereEmbeddingsTaskSettings.of( + taskSettings, + new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.END), + InputType.UNSPECIFIED ); - var requestTaskSettings = CohereEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(CohereServiceFields.TRUNCATE, CohereTruncation.START.toString())) + MatcherAssert.assertThat(overriddenTaskSettings, is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.END))); + } + + public void 
testOf_UsesRequestTaskSettings_AndRequestInputType() { + var taskSettings = new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE); + var overriddenTaskSettings = CohereEmbeddingsTaskSettings.of( + taskSettings, + new CohereEmbeddingsTaskSettings(null, CohereTruncation.END), + InputType.INGEST ); - var overriddenTaskSettings = taskSettings.overrideWith(requestTaskSettings); - MatcherAssert.assertThat(overriddenTaskSettings, is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.START))); + MatcherAssert.assertThat(overriddenTaskSettings, is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.END))); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index e9fb835016b4f..dcf8b3a900a22 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -64,7 +65,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() throws IOExcep try (var service = new TestService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), listener); + service.infer(mockModel, List.of(""), new HashMap<>(), 
InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index a76cce41b4fe4..36a4d144d8c5c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -492,7 +493,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), listener); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -527,7 +528,7 @@ public void testInfer_SendsElserRequest() throws IOException { var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), listener); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 394286ee5287b..2659715771686 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -667,7 +668,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException try (var service = new OpenAiService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), listener); + service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -713,7 +714,7 @@ public void testInfer_SendsRequest() throws IOException { var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), listener); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -787,7 +788,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { var model = 
OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), listener); + service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index 10e856ec8a27e..e2144132af6c1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -27,7 +27,7 @@ public void testOverrideWith_OverridesUser() { var model = createModel("url", "org", "api_key", "model_name", null); var requestTaskSettingsMap = getRequestTaskSettingsMap(null, "user_override"); - var overriddenModel = model.overrideWith(requestTaskSettingsMap); + var overriddenModel = OpenAiEmbeddingsModel.of(model, requestTaskSettingsMap); assertThat(overriddenModel, is(createModel("url", "org", "api_key", "model_name", "user_override"))); } @@ -37,14 +37,14 @@ public void testOverrideWith_EmptyMap() { var requestTaskSettingsMap = Map.of(); - var overriddenModel = model.overrideWith(requestTaskSettingsMap); + var overriddenModel = OpenAiEmbeddingsModel.of(model, requestTaskSettingsMap); assertThat(overriddenModel, sameInstance(model)); } public void testOverrideWith_NullMap() { var model = createModel("url", "org", "api_key", "model_name", null); - var overriddenModel = 
model.overrideWith(null); + var overriddenModel = OpenAiEmbeddingsModel.of(model, null); assertThat(overriddenModel, sameInstance(model)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java index f297eb622c421..103fab071098e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java @@ -72,7 +72,7 @@ public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model", OpenAiEmbeddingsTaskSettings.USER, "user")) ); - var overriddenTaskSettings = taskSettings.overrideWith(OpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS); + var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, OpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS); MatcherAssert.assertThat(overriddenTaskSettings, is(taskSettings)); } @@ -85,7 +85,7 @@ public void testOverrideWith_UsesOverriddenSettings() { new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model2", OpenAiEmbeddingsTaskSettings.USER, "user2")) ); - var overriddenTaskSettings = taskSettings.overrideWith(requestTaskSettings); + var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); MatcherAssert.assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user2"))); } @@ -98,7 +98,7 @@ public void testOverrideWith_UsesOnlyNonNullModelSetting() { new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model2")) ); - var overriddenTaskSettings = taskSettings.overrideWith(requestTaskSettings); + var 
overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); MatcherAssert.assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user"))); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index 38c7f85b189f2..b37f82e45ec49 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.metrics.Max; @@ -191,7 +191,7 @@ private void initChunkedBucketSearcher( ActionListener.wrap(searchResponse -> { long totalHits = searchResponse.getHits().getTotalHits().value; if (totalHits > 0) { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); long earliestTime = Intervals.alignToFloor((long) min.value(), maxBucketSpanMillis); Max max = aggregations.get(LATEST_TIME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java index 780841880a6c1..88c19fc670794 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/MlAggsHelper.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.aggs; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InvalidAggregationPathException; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; @@ -34,7 +34,7 @@ public static InvalidAggregationPathException invalidPathException(List * @param aggregations The aggregations * @return The double values and doc_counts extracted from the path if the bucket path exists and the value is a valid number */ - public static Optional extractDoubleBucketedValues(String bucketPath, Aggregations aggregations) { + public static Optional extractDoubleBucketedValues(String bucketPath, InternalAggregations aggregations) { return extractDoubleBucketedValues(bucketPath, aggregations, BucketHelpers.GapPolicy.INSERT_ZEROS, false); } @@ -50,7 +50,7 @@ public static Optional extractDoubleBucketedValues(String bu */ public static Optional extractDoubleBucketedValues( String bucketPath, - Aggregations aggregations, + InternalAggregations aggregations, BucketHelpers.GapPolicy gapPolicy, boolean excludeLastBucket ) { @@ -101,7 +101,7 @@ public static Optional extractDoubleBucketedValues( public static Optional extractBucket( String bucketPath, - Aggregations aggregations, + InternalAggregations aggregations, int bucket ) { List parsedPath = AggregationPath.parse(bucketPath).getPathElementsAsStringList(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java index 48c0f645b6fbc..650c02af00837 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java @@ -17,8 +17,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; @@ -92,7 +92,7 @@ public ChangePointAggregator(String name, String bucketsPath, Map maybeBucketsValue = extractDoubleBucketedValues( bucketsPaths()[0], aggregations, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java index 02386acbd6134..97e803b5961a7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregator.java @@ -9,8 +9,8 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; @@ -33,7 +33,7 @@ public BucketCorrelationAggregator( } @Override - public InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context) { + public InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context) { CountCorrelationIndicator bucketPathValue = MlAggsHelper.extractDoubleBucketedValues(bucketsPaths()[0], aggregations) .map( doubleBucketValues -> new CountCorrelationIndicator( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java index ea01f07146ea6..fd5c66399c72d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java @@ -26,7 +26,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; public class InferencePipelineAggregator extends PipelineAggregator { @@ -102,12 +101,7 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe inference = new WarningInferenceResults(e.getMessage()); } - final List aggs = bucket.getAggregations() - .asList() - .stream() - .map((p) -> (InternalAggregation) p) - .collect(Collectors.toList()); - + final List aggs = new ArrayList<>(bucket.getAggregations().asList()); InternalInferenceAggregation aggResult = new InternalInferenceAggregation(name(), metadata(), inference); aggs.add(aggResult); InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), bucket); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java index 518b76aae3732..f26dadf5ece22 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregator.java @@ -13,8 +13,8 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.xpack.ml.aggs.DoubleArray; import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; @@ -224,7 +224,7 @@ private static double sidedKSStat(double a, double b, Alternative alternative) { } @Override - public InternalAggregation doReduce(Aggregations aggregations, AggregationReduceContext context) { + public InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context) { Optional maybeBucketsValue = extractDoubleBucketedValues(bucketsPaths()[0], aggregations).map( bucketValue -> { double[] values = new double[bucketValue.getValues().length + 1]; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java index 468eecc9e56e5..4cd5379d8fe3b 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java @@ -14,7 +14,8 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.core.Nullable; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigUtils; @@ -96,7 +97,7 @@ public Result next() throws IOException { SearchInterval searchInterval = new SearchInterval(context.start, context.end); if (aggregationToJsonProcessor == null) { - Aggregations aggs = search(); + InternalAggregations aggs = search(); if (aggs == null) { hasNext = false; return new Result(searchInterval, Optional.empty()); @@ -118,7 +119,7 @@ public Result next() throws IOException { ); } - private Aggregations search() { + private InternalAggregations search() { LOGGER.debug("[{}] Executing aggregated search", context.jobId); T searchRequest = buildSearchRequest(buildBaseSearchSource()); assert searchRequest.request().allowPartialSearchResults() == false; @@ -133,7 +134,7 @@ private Aggregations search() { } } - private void initAggregationProcessor(Aggregations aggs) throws IOException { + private void initAggregationProcessor(InternalAggregations aggs) throws IOException { aggregationToJsonProcessor = new AggregationToJsonProcessor( context.timeField, context.fields, @@ -167,11 +168,11 @@ private SearchSourceBuilder buildBaseSearchSource() { protected abstract T buildSearchRequest(SearchSourceBuilder searchRequestBuilder); - private static Aggregations 
validateAggs(@Nullable Aggregations aggs) { + private static InternalAggregations validateAggs(@Nullable InternalAggregations aggs) { if (aggs == null) { return null; } - List aggsAsList = aggs.asList(); + List aggsAsList = aggs.asList(); if (aggsAsList.isEmpty()) { return null; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java index 612860efee549..5c9711a6e5d8b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java @@ -11,7 +11,8 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; @@ -89,7 +90,7 @@ class AggregationToJsonProcessor { this.compositeAggDateValueSourceName = compositeAggDateValueSourceName; } - public void process(Aggregations aggs) throws IOException { + public void process(InternalAggregations aggs) throws IOException { processAggs(0, aggs.asList()); } @@ -102,7 +103,7 @@ public void process(Aggregations aggs) throws IOException { *
  • {@link Percentiles}
  • * */ - private void processAggs(long docCount, List aggregations) throws IOException { + private void processAggs(long docCount, List aggregations) throws IOException { if (aggregations.isEmpty()) { // This means we reached a bucket aggregation without sub-aggs. Thus, we can flush the path written so far. queueDocToWrite(keyValuePairs, docCount); @@ -230,7 +231,7 @@ private void processDateHistogram(Histogram agg) throws IOException { } } - List childAggs = bucket.getAggregations().asList(); + List childAggs = bucket.getAggregations().asList(); processAggs(bucket.getDocCount(), childAggs); keyValuePairs.remove(timeField); } @@ -269,7 +270,7 @@ private void processCompositeAgg(CompositeAggregation agg) throws IOException { } Collection addedFields = processCompositeAggBucketKeys(bucket.getKey()); - List childAggs = bucket.getAggregations().asList(); + List childAggs = bucket.getAggregations().asList(); processAggs(bucket.getDocCount(), childAggs); keyValuePairs.remove(timeField); for (String fieldName : addedFields) { @@ -335,7 +336,7 @@ boolean bucketAggContainsRequiredAgg(MultiBucketsAggregation aggregation) { } boolean foundRequiredAgg = false; - List aggs = asList(aggregation.getBuckets().get(0).getAggregations()); + List aggs = asList(aggregation.getBuckets().get(0).getAggregations()); for (Aggregation agg : aggs) { if (fields.contains(agg.getName())) { foundRequiredAgg = true; @@ -484,7 +485,7 @@ public long getKeyValueCount() { return keyValueWrittenCount; } - private static List asList(@Nullable Aggregations aggs) { + private static List asList(@Nullable InternalAggregations aggs) { return aggs == null ? 
Collections.emptyList() : aggs.asList(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java index 0d2608cd2752e..0dfdd9897737e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -108,7 +108,7 @@ public Result next() throws IOException { } SearchInterval searchInterval = new SearchInterval(context.start, context.end); - Aggregations aggs = search(); + InternalAggregations aggs = search(); if (aggs == null) { LOGGER.trace(() -> "[" + context.jobId + "] extraction finished"); hasNext = false; @@ -118,7 +118,7 @@ public Result next() throws IOException { return new Result(searchInterval, Optional.of(processAggs(aggs))); } - private Aggregations search() { + private InternalAggregations search() { // Compare to the normal aggregation implementation, this search does not search for the previous bucket's data. // For composite aggs, since it is scrolling, it is not really possible to know the previous pages results in the current page. 
// Aggregations like derivative cannot work within composite aggs, for now. @@ -142,7 +142,7 @@ private Aggregations search() { try { LOGGER.trace(() -> "[" + context.jobId + "] Search composite response was obtained"); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); if (aggregations == null) { return null; } @@ -175,7 +175,7 @@ protected SearchResponse executeSearchRequest(ActionRequestBuilder 0) { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); earliestTime = (long) min.value(); Max max = aggregations.get(LATEST_TIME); @@ -285,7 +285,7 @@ private DataSummary newAggregatedDataSummary() { LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); // This can happen if all the indices the datafeed is searching are deleted after it started. 
// Note that unlike the scrolled data summary method above we cannot check for this situation // by checking for zero hits, because aggregations that work on rollups return zero hits even diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java index 1d78ad22f3f85..49e25c95713ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.metrics.Cardinality; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ClientHelper; @@ -156,7 +156,7 @@ private static void buildFieldCardinalitiesMap( SearchResponse searchResponse, ActionListener> listener ) { - Aggregations aggs = searchResponse.getAggregations(); + InternalAggregations aggs = searchResponse.getAggregations(); if (aggs == null) { listener.onFailure(ExceptionsHelper.serverError("Unexpected null response when gathering field cardinalities")); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index ebe4295f8efbf..3ef2affa5d399 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -14,7 +14,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; @@ -101,7 +101,7 @@ private TrainTestSplitter createStratifiedSplitter(Classification classification searchRequestBuilder::get ); try { - Aggregations aggs = searchResponse.getAggregations(); + InternalAggregations aggs = searchResponse.getAggregations(); Terms terms = aggs.get(aggName); Map classCounts = new HashMap<>(); for (Terms.Bucket bucket : terms.getBuckets()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index becbffefff8c8..f8f1e95fecd2e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -68,9 +68,9 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filters; import 
org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; @@ -546,7 +546,7 @@ public void getDataCountsModelSizeAndTimingStats( request.setParentTask(parentTaskId); } executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, request, ActionListener.wrap(response -> { - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); if (aggs == null) { handler.apply(new DataCounts(jobId), new ModelSizeStats.Builder(jobId).build(), new TimingStats(jobId)); return; @@ -1602,7 +1602,7 @@ void calculateEstablishedMemoryUsage( ML_ORIGIN, search.request(), ActionListener.wrap(response -> { - List aggregations = response.getAggregations().asList(); + List aggregations = response.getAggregations().asList(); if (aggregations.size() == 1) { ExtendedStats extendedStats = (ExtendedStats) aggregations.get(0); long count = extendedStats.getCount(); @@ -1810,12 +1810,12 @@ public void getForecastStats( ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); if (aggregations == null) { handler.accept(new ForecastStats()); return; } - Map aggregationsAsMap = aggregations.asMap(); + Map aggregationsAsMap = aggregations.asMap(); StatsAccumulator memoryStats = StatsAccumulator.fromStatsAggregation( (Stats) aggregationsAsMap.get(ForecastStats.Fields.MEMORY) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java index 6acffc3a6f745..055c75d252281 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java @@ -8,7 +8,7 @@ import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Max; @@ -38,7 +38,7 @@ public OverallBucketsProvider(TimeValue maxJobBucketSpan, int topN, double minOv public List computeOverallBuckets(Histogram histogram) { List overallBuckets = new ArrayList<>(); for (Histogram.Bucket histogramBucket : histogram.getBuckets()) { - Aggregations histogramBucketAggs = histogramBucket.getAggregations(); + InternalAggregations histogramBucketAggs = histogramBucket.getAggregations(); Terms jobsAgg = histogramBucketAggs.get(Job.ID.getPreferredName()); int jobsCount = jobsAgg.getBuckets().size(); int bucketTopN = Math.min(topN, jobsCount); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java index ecaaee5d3bf4b..99923d19d81ac 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java @@ -13,7 +13,7 @@ final class CostCalculator { private static final double DEFAULT_SAMPLING_FREQUENCY = 20.0d; private static final double SECONDS_PER_HOUR = 60 * 60; private static final double SECONDS_PER_YEAR = SECONDS_PER_HOUR * 24 * 365.0d; // unit: seconds - private static final double DEFAULT_COST_USD_PER_CORE_HOUR = 0.0425d; // unit: USD / (core * hour) + public static final double DEFAULT_COST_USD_PER_CORE_HOUR = 0.0425d; // unit: 
USD / (core * hour) private static final double DEFAULT_AWS_COST_FACTOR = 1.0d; private final Map hostMetadata; private final double samplingDurationInSeconds; @@ -47,7 +47,7 @@ public double annualCostsUSD(String hostID, double samples) { return annualCoreHours * customCostPerCoreHour * providerCostFactor; } - return annualCoreHours * costs.costFactor * providerCostFactor; + return annualCoreHours * (costs.usd_per_hour / host.profilingNumCores) * providerCostFactor; } public static double annualCoreHours(double duration, double samples, double samplingFrequency) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java index 8d5765fa97c51..b6795294e7f06 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java @@ -7,28 +7,25 @@ package org.elasticsearch.xpack.profiling; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; import java.util.Map; -final class CostEntry implements ToXContentObject { - final double costFactor; +final class CostEntry { + final double usd_per_hour; - CostEntry(double costFactor) { - this.costFactor = costFactor; + CostEntry(double usdPerHour) { + this.usd_per_hour = usdPerHour; } public static CostEntry fromSource(Map source) { - return new CostEntry((Double) source.get("cost_factor")); - } + var val = source.get("usd_per_hour"); + + if (val instanceof Number n) { + // Some JSON values have no decimal places and are passed in as Integers. 
+ return new CostEntry(n.doubleValue()); + } else if (val == null) { + return new CostEntry(CostCalculator.DEFAULT_COST_USD_PER_CORE_HOUR * HostMetadata.DEFAULT_PROFILING_NUM_CORES); + } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("cost_factor", this.costFactor); - builder.endObject(); - return builder; + throw new IllegalArgumentException("[" + val + "] is an invalid value for [usd_per_hour]"); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java index e0b634b5fb9dd..e9f912a3f60e5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java @@ -15,23 +15,29 @@ import java.util.Objects; final class HostMetadata implements ToXContentObject { + // "present_cpu_cores" is missing in the host metadata when collected before 8.12.0. + // 4 seems to be a reasonable default value. + static final int DEFAULT_PROFILING_NUM_CORES = 4; final String hostID; final InstanceType instanceType; final String profilingHostMachine; // aarch64 or x86_64 + final int profilingNumCores; // number of cores on the profiling host machine - HostMetadata(String hostID, InstanceType instanceType, String profilingHostMachine) { + HostMetadata(String hostID, InstanceType instanceType, String profilingHostMachine, Integer profilingNumCores) { this.hostID = hostID; this.instanceType = instanceType; this.profilingHostMachine = profilingHostMachine; + this.profilingNumCores = profilingNumCores != null ? 
profilingNumCores : DEFAULT_PROFILING_NUM_CORES; } public static HostMetadata fromSource(Map source) { if (source != null) { String hostID = (String) source.get("host.id"); String profilingHostMachine = (String) source.get("profiling.host.machine"); - return new HostMetadata(hostID, InstanceType.fromHostSource(source), profilingHostMachine); + Integer profilingNumCores = (Integer) source.get("profiling.agent.config.present_cpu_cores"); + return new HostMetadata(hostID, InstanceType.fromHostSource(source), profilingHostMachine, profilingNumCores); } - return new HostMetadata("", new InstanceType("", "", ""), ""); + return new HostMetadata("", new InstanceType("", "", ""), "", null); } @Override diff --git a/x-pack/plugin/profiling/src/main/resources/profiling-costs.json.gz b/x-pack/plugin/profiling/src/main/resources/profiling-costs.json.gz index 1258fb7344b62a8eb0c4b50800ad35e164e11599..590c0ff606201159cac935611c542f5a5cdea50e 100644 GIT binary patch literal 72998 zcmV(@K-Rw>iwFq+fU;!*18{P0W@&6`Zf7lHZ*z2WE^2dcZUDUf+ioOTvaOB1%ewwE zFwY0QiY_)-!7LVm3U(?gYwc7CdUu68+5HRHwBZr_Y_7}h0z8B6^I$e?9R8pGpa1%C z{nvlLK7YUd*Z%ZQ%|M~m3zy9_2&p*D!+yDFe_Ses+xAXV^ z_4EJz-Oee?DDbzTf`a-+w*-uOHv9|Lc!`{e62(IHlPCzyH^lpL}kw;?Ze< zM_<~@^Jy3lkEfZyv#<4J{pi@pkG{4S^Eanctf7F1|4OglpWjZG??0dZ{6C(a$0^K4 z-gkXICG%U7C!gzSn(rjkgdY8*esw_~^R{g+ujlK(K6!A6nZ4`VIx~KCIfYix5_~y?uJs2UZbQ``)=N#6Hmh{{78f=Kc1c;3qw0`{4;yn`gO+N^P_#jP^ojD{;Gi z>=M7gcOrgv&#b%>zq*aV>icy+`}R)EqLAG_w_4V2T`Q~y?W1bK+ehDQ(r*@bm^R$~EoI z)>@k7D_Y;TO)1_rEYp@%fLFEt1_jvE4P4kexxH?C@ilti*jE{Y*Y@U5K{9>d|s}=kG9bbihu7N zvDiTwp5v=VfYm}ZJK8?mS!f`zJ&l&h-XSO>AqUd`sD7TQ19*K!>gku~4< zZ3SUQgeaNu4IxU3?r<;LuVtcfbHQh~}9#(eEnG3uf>=iiK&o+)JiimCS zkM)zoXzVw?nLj%5k0?OG7AXR>Y~Hn<+hPQmC!ExY#hv+f7lTnc=r*_eoUjcvpIljy zX%ljKq@!H>Ty5yG+2?ekf<=eatrscg@2(WKPXe03TheTF=B>}^h2CYWh>w@L=$d>- zB5a%^^h6I6`w}jk&SuW0AoOE@$bmkwGVk_z$e#Ld`)c`Q^z-~e?*bOUiY>Fjhg{8@pf*6AZ*r&iq8F36YH>d zIIaKuy-*U(`_HzE<;bTnN-Izs1bY?7fUXuB>qw=+?fLoFgnM&)u&wZfaICoHS=+OB 
z4>n7iUc}#}+>r9mY))RFGjwzLIp{kD!FSMa`T5eOA9AZR?X}%7ZPR=qvD1ySi~+e_ zmp0=EOAv+tsA*?xH##BXs!AZf6n=aWHJig|-drkaoGR+mUbA`UV2bamn_*Xv+GL-` zIBjBmvo&~?>zmEI_8!gdNCGaAt<#$#_a6&fYBW$k|C|Kjr&kC=Sk*V2`&V;QQWKG- zz=^p^B4tyXG}GB=Ba-daG2qE+1F;y)G~3}+X%8aKD7Fu+gOw5>%$v7VXaci}k93vt z>egxhA*RoGrYL|fyccZ%X<{aykb1<~=+|7vihB{X&&D?UPo?NAIa5-I@S8}OPim5n z1O1e1tj8{r@#G?`QQjdD54aXe590R&-2rUk?bT8;ezs{r7N}}oFj@-R_BMtzj;_of z-@ZTn_46w%oWrw9JX-c3?YM9ikJbo%JN3inNjzM3AM+5MQg1{KZ{sNBJ4!f%K3T9I zW(OZq$)nQkmM70X5E>-%QuH-^TnvgwzcV)Ms;Un7Ed{|0Jo-Su0G_OEcF3#N?ZR>G zAh){YQWSKGsL@a&2b4ykJ6s(?#5FUyXCzf0Ej*>G4g=neO5PvJU?-o2a+QL9_RE`O zQ8c!!#hE%2d$fHwW^#Z?JAy*io}BCjbe=w|IaXm$HrE|0n}pYPgUeai$5iy2r(Hy% z<6pJAbX^G63pq#+e3978SXELV(W4rx4(uh(?krJ-k*?_6bWe`9iY13b-|yTc6(F%= z-)0n5xSR#Nm*DQUtC<9A8KK<^)ZIrgN6HlloNH?U7+DJuuNT%w8dAM;_@iXnP@6Ur&`>=W^l$8*ep0HN1m=L555PXxISx=IQKcaI zEO_{|Z&rf$z=lx@1#gAU{XP%cOarbwF^p<-;9z@+WKvh_&;|Ue z#fG5+XS>($ClO`jz|=pLnb_D)w_|t)tg9E* z>^hbu^~hCCq%Zu5@d#KAsJ zbBCvgXmC61`PYmSWr${j^gLk%0n0HHr;fCHS=2deu`e9zNb|Mc;KgNHE+X{wz=#rx zNX$G5bSi02(GlU31!cW5NrUMD7U5V&IMlEam)6}+2vplQ03uK*CumlvE2|VdfNys8 zbA@-f817fvCW_jlRLE)-QO6l-1B5Y6No*FX-u8qnnswH(IT%(YX zsrQ?SjgA&+;SZvM6J#~5x@F#ni}u5Nff89^*$?3?9bUrh2X39Cs759%bcBIwW0=N0O-m4Y{M366gpQ^ zbOLfms$_u;%gM0yu+OM522}!n_8nn&dKC7Pp}5u70y3fR5;g9tau^v2!oVWp2cAr( zs01mR4y?%voQ#?-MR(k{Z~waf{KNj?{R6H2w*CCi19<4aaV=_+c?Xhx^d~%tu|b~o zj05_;05$5xuN&C6&_ua%F3+L$7^0%#6^}o-y$v(ulxJA znH9G^5CXsIi&7>Ohrhw7!-r7v~R{9|ixm%#r3=`xr3slC4 zZr57D#k-72n|bgPTar{V-^yy5-}%KXaln~d^C_?eZ2*6SXIsH}+aR;!(&$MH6!`~u z{YSgSI8={MN`KUb1Jw2O43%p7ANcQ|(d4OajNsvMpP2)&8AErdyp;8Cb*PMlec&j= z;kp!3qJm%0rM$?A??@x^E=1D)rpH$hW<^>A-62FKT8`BkzvwY#h!Cyi_T+%$zW@U1nl#=D?Q`wBETvTZb?L*;kGQJ>Cg$|$ zIP3+Q>97BhY`$bI$z={?R&up^@G))YwBo`=lYCW&ZUZ`yACJmZDat>wjB&*9@z4Au z2$`iis9ppIs&+h*ejmlzs6-DvBo9WTUB<=iu@RJ5oKLf~(?{aW-ivI??g{TazUgiG zWbx2{3EM)4apHE=iT5)KzbU(N;0=7@`XWOHW(g%Zd}YBY9UlCZcbU-khkQo+Yz_?* 
za}qn&2nji7KQ-QbpHd+Hgq-2MNShKbr*t{wpowkI{lD#7KXwqH@hQrEL!|2H3c8#IhK#1-wHz+q4MG5A^EN)Y}G@wQ(h9kqto_4y-2ujKfMq-SZz(wjpQqK*rP z3Uy<<30}B|1V;EQr0B4O?DtdfTyf{Q2_Mg%E9d!an6ilTXeIi<2{tg>F(C3@!H5YU zR{Y5@j(KV=d7#&}1z_cmfBp9RpWlB827PWm9C|y$yzq-*rwHyOJU8>gmy z5Tk64Y#5v%*roJo4iAeB2?02OFe(@{PKScemV##o@tG8Ra2$BQAMEQh_v~>wD1u|D zT&{g<8cegIhKEtNrx!t}$6do&`TSHDfJcQXc`$zjx66gPk?l;!$-6e&d)vjQPVi$L zocxq5GI;ozs93ApZ5%wvOJWqotroVc@*i=I?hU60awH1Wz){F^mcFIY11vWaJyR$S z!-}O>*)|bZ?!%#b(3EXiSaul6+Kz;{uC}kpR2<9{rJ-f-K46ZWayeslX*>jnuh!)8 zngn|1!NQdQWSzNs2tGtE()7Sk&Fg-gdiC)jwz<&3W#A$#w94onBs($rEiRTC=5|iq zL78WR2rkSAQBiY$tFAoUZ+BVe?2~P>9PH8caHEfX?vq__doa3iK}o>FJ^DWpCEs1< zY2rK$-9s27cAAIk!w$CU(}l6DzVRqMxUe-<@SKCE{!(?Gj0LI=hIp^ptlT+I>hEtc zZ9cNmZlH2J^_(3jUxJ9jN6YW7;`Vn)EqY-5rY3*53>@vCrs={9mD{<;=Hk$4d$M&- zN-V-S!l6eK^GpZlkt#=N4vKae^vYS+VgS`W+9#C($(0T7twFKPrX74W4iL3~=dmBY z-}ZUkZr4KV4vJNwIdp)PRgh!gGv%v7bMWkJ*9Rz_51wOQo9%lzrVnp(VbIf6u?F%f zyV$z_rhJMVWJE8@!lvag9q3-xH|`-%7iY*Myy?43){PiH;q!D;t|}!9rT}eVND&QA z_Q;tK+GMUdy!DuGPjCPF?3@nGg+gYdY1KgprS8jIJ+i87%B;h5`f3`~x@d+k(o?af z;KEuK1ab%B@>OMw_UU0JuIvSPG4U~=u=hc}EhHCQ(_X3$>3^DzgIzE#BISB!4>gqi zk&}y&wV17h;-QBUKvd!Gx=n_ARPOVj(mOe)d%NpxI+ut1L;8@^h!@cZZLip9W9B#J zwi7ojkc5Heq$SuEcRIR*ux?wDU4J+7*R=iiF?EVDQ0Oet@>z{3`lHUM{Z85AYPL(%?jZ41En1vD2cEg1T?}`BMg0#%J_8h9xY#N$3GPzl z)AlSwTk{rh$7k(Q#m&o?-Fki4WM2p2(5o_!cW}>9=S0p9gVKeWTUR-ob1(+e=3_2F zq?e|p!#wcmC5!o!gJe~z+~<}J=b-FaH*Vi}8n$^3$J6$~!7$Oxz&-Mwl+H6%k5T?o zkH1~qE>5B1FkQ64uGXeUA8s$u>3}1wgSqwrz92$5TwqIi^siYKx_IoD#NO@Ig~M&T zvlWlP0g<_wJZN-cW;wX?m|$8Z4~pJO!tJHXKuS(?yV|JAbTEs!N^iyLU@v;s)lZHN zOH^7N^T5cn?zKa4kqQYoVhoQcJNppo9@ACO^CE|?+2JpE z(lk9E69=JhPCkj0pOeJ+C;nL^&og_>QtYJq$b$_O`;5yWw^5%kVc`To^MttF0@WcT zPeyxiRr9qfK7RCw0}!)B2f3>>sl6Qz)5T%I1|mjqn7C>ZX00w08u6Vg2K=^?)J_ED zr|8c7x3dZzjv+f=$NEnz!&I>q8Pu+Qn4*aTH6~EGl;w(L~ z+wDVhXQd?iOpaS`S|ovsyGXM|{p--=$cVLD?Z{J+&jQ;Ivt>?k5ObC0K>#t+Vrp|p zbdCb{^`KCQ4S@TnSE1#^75i12arRRSA-kB*Tf*#8Ty%1xV9C7m(EfTOdMIq{BjWZI z94@n;VjXcF#r7A7Wr{z+OA;j`pL#rrh|riELWExw7?>l^45|M}p_zlR^nd@1(!inA 
z;h?OsO`+m$fRH`PiP!V>Q;>ptqV8ndVjEfFdS!Rtwuu^p;|MSLFA>A&u#v?{I){($ zTOoBF*F)PFP&gn|EZn&$i}fuE)xj&}Nn!iNfi<=#3|e)2=80bKn5cSs3bNP0bPLH@GyFT*De=u$|mztu>NzrtPI=7h*lY9JZJ-N+ktsX-b zWm6Pg#1mpLJ{-!ri<-)&6E`YYn~f-`1AS8hdf0yo;3fLZt;OHaxSzmQxu_tQ96ije z-yI9Kd*{=>Jn{Xk+@@P`IPa7p!gTl-UL;E-*@L50T_Q4Up3B6Q z_PJ}>UgP8UO-*4X#`vVrnJ=hJoGJV+7Km-BTA;nIz5UM$bMeUS*_&l`VDS1*OsC55 z;KbRy&{N|h?5i|@ap2I%Bzp%Br?ti1E^-$m9S4gXpX>%OK{=~D(sujDvrE#7eGoMViM33CcfiK&loIYE@x!KA z#N^@tAO&3m&)&~jdWuJvv{^gK((p+bsDIv5azMGY(81ym1jPAFIb* zpxiw0Z0jQ9m^_jrhp_U%_2{$E1#sWet8m`K%kv{g%FgUAsTBuLA#@jK3)8z~N^S0DsYtts%lZifEocP3L z@8TJ$kP3c?W|?&0VA?6E?hZJ`%1pb9HjDI7tO#MxEX8GZW}eWwJA|ly$UGHIqd4>( zs+x1?EkFJW5sKBPi$X~_2xkvZd27Dq09-d|L(QwwbCZ*HhkWqoB>H2Xyme_ZlAwEl zvdS&pqu#Yuq`@N?QC$YS;_Wg$vUteC>v0|FNw&+1)Ng{J7do%+Aa_1w1vrM!| zIkg1vy5$~tD_=4Fd#B^~IP~K_5h4U#Ig3AAby=F8J zz}xb@okhignJhrw9$X9~Y@N0bj(L&QoI@ac*C}fbPV!10*>3-dkDaPCAMuG)ux;{^ zL(-kHxAutXQx0&rZ+cC_xhr77-;X2;_Tu0hanU}1F=P+PpS_FcEFL2>5%|tU&NA?w zPs~1uU3S7zkNl{U^xHCAVp|nX=MZHmj_SrekI&Kq2DfHEiWn7?vRyWulT(%BXboJ; zkX3ExaFp6mb$BQiIXzB)&W|NiC*z|ruj{91P7enUJk0(#{sMFHrHcS_`PB&s_~|M6}}jP_ZM@#24Znqp=?Nxg-0F$12Zc zQQ4K>M@;nX8FUptps`=UH%417D&CYL~{h@Lm=Qib3`$Yi*8J=5^b% zN-WBc3Dnc$@xCGQ@gn`YdRj6D zhvI;HjLexHw*RdT7p2a8#S8Q*UA(tD%;6nLE)PCOJtOkcca-!+?6qR8Ei9tX8D zQRuiwJxkSK;aAyNGHh~iD5r%s2|zYs-qVka_?+xp*h8uq|Aq`6(6F~ z>L%dD>20o^9(6d_Myl22q7f1A4Zo+eOlN!0T+RF&%shvd@Kq-L!BZ{{7mZJvK>E~b zB_{2)5_3HDobhgxUK7FLK>AJH_t68iR6YUO4POzIuv(HsZzGD+5<2g0RNe9zt=6oJ z?3*Fy>e66(#W|YiwtaU#6zP;6^80TO4u>raXCX*^Ij}no7y>-2#Dy&$q;Zr;@TMJZ zOzFq#QCHiA5#PS@07{j^6XJB3$pruuopJOi+=WKRq4D@GZL#nH?WyrfUr4vA4l_QH z@`n44A_}1qs{{Nep3i|B*%wy~JZ+Bnb{0#fujg0%ejt(^UUi&M&h+TO@0hl@%Vmq% z=tOluFjYG+@fJRJc}I>sJnk`jNPtVYvtlnBi7MVCKX5DwB2EY!37D&tFeDWd{b2HVAWgKRR8KSBGA% zoyqcB5+jG|a&Rh#0Ur*7m_GCI8$8q zylY$w9KKYs^5<+ba_r=nk3m;5{1#L&b@t$l*qzI*DHsm)K@s_f{{ZbE*BpvNReWX- zdc9SfyG*i|);_x%g7DkH|9}jmbl~dPMQ!3w7hD|-$)AO!By>KlTg%tH{>PwnJN$WY zu(fmq5ye4QVnGf*O2RiaQQ)88eWqzo#re=>BhCJGdHVCu&z2}Bp}ZtD5T71wP&b!h 
z3-Xuv?bGR@)(D*R{MZ*-9V9HFa42c40EQhblvuoqKe>uZ!hd8X7C5 zI84R`Xx>~H*WwTg)#0ABp~=}CjvyU?;lhj(<{xo-U?*Z5bk5t~`rkN2+&6u`nvG-+ z_}Sh9TwC>OVkhxI`To#X)c&Xhj~t zvo!pe9u~n9x0h3j)aEeenJ-SP;5Ho84@nZw8mqui%Dr?aNj{+bCQZ zE}~!?C#xYjjPPu3c$pZe*=lIzs?AlGTMUj8cW|6QdLrjJJUCt==Eddr3wJVyOH2*I zChla;lH$(E$v;GwU9eBnSx1S5UQFAsT?sDCw1fIj`oz? zVf9<$KJNSRL)a4J>~qD-+|%g8@Um;@*d4Agn}kvwW}Ql2Hmb)c!X(;jwyW7GG0|c> z3K`W%#l^+M=!iX}HkTq3WSkBon~odCa3CexQw}}RC zgZ8B=Fe2oJ(Slo<5`*oUwnKk^YnxG7H9vISLT#G^ zkx;y+=HaYhX4;10apBwZ!|vm%Y7cN7qCbSfC%a%zLLAAVW1Y~BAOEojY_q;rPosJ0 zpuBQtKRvGVU-n0yn5+IP+PA?r0E#|6rRH)vSWqYopR;Y=odkrCw{WiF%7CeD)GA8r z>|pdIlP;126~iRDdRRqV*2;w6MKjI3@Uz2JxistLtPGO5&iqmp5ve(hSl2lW?ktZi z+}S<$)>0RQJqV{m1DGy;$R90}`I*x^ zg@~Q{7KrAleWo2;Vun`1k?a=)FyGyo*9o*SWC^LrV#M2i&vz5X-2-GFUl!9mF|+WBo2(=;?%#g2mfIg*0<)P zyLjCTiol4?hdykQpUN6S_?(C3&C)7D*3tdsz=TpXjes3v_QbBG(;7J|OUYwKAc!~)z1UfbXsd(BQuImO zRkkjTi@it@;RvaCoY9uVwfbB#;*K1d182%|w^R=ZS&6(oZh7^Jdg67Qim#Z_qYxED zF(09*;I6nNqTU)Yb<8C`=d%eJxjXEEm8oVQ*sfu2=V3N+S*mi?*)#l-e91E3zy(VY znkI)aw@`3;7&BeEoKiX@uU_lg433M8H12M>+sc2JXL#+?N*nXYF0s;|J75MM14;9g zD0OXR~er@zX9SUHt6JaVe6zrewyp_=C0(c$qIo!rYao;G7d z0Yf+(&58!;(#G2?zv)o@2&mX6xLwq5T#xuO$)nRDJyu!`4GgB={s3Du>!YVs=Ib6r zRMEp^>ZoP6^;3}f13x6E^3tcP?4S=%`bY)^dzsHwPmZtP+|f44;WRj|tK@hS0tKzYJc{B57b=`bED zp7Z1BZxbJGepJo;m<%dD6s?ul>KgJ_J>?vhr|DGjn4eJoLcsK6IydWfQ3@RVCmb@a zOW+rPnsYwpE0iBCp!oqR9i8xxclA2S4-ZEkbGy%vI~~vLQDE8Mw|K07MTNs#c>BUO z+jNgur2oWEIpN9pkT}Y|ddx~WuZ|7LkHt{N2dB)&7qF{1TqOFPkIyl9hgYR-UG-=om$ujk>4!6UROw#Eu|^@Mwg7M9b$;VxV)^_qpLJCJ z{5LNUH?Q@*%_|+&%?d)1yoyJcp{Cd&Cl8pw{u`o0qb&z{C3{pTNn{9Cha*&JR&XnS zt02S&=pHze9sU^yZ{`o}vs&d)ke54fek`{uhTARdvdL7P-iF6TFQDS!ag8Z5qC+X< zq+QHk4M6lb=u|ab9EwpTjd3`K>=jg909P;VpH)lSwR(0aMVG=s+F*gd{%d>nW4xQ+ zw=6{a4g~(W@)O8yoSGjG*&PnEFiEy{=#<3k=flV&MytmWB*WC5xqiUP5 zU>^|8W#*uW(R>Tkg(H)F2ixwOga3A=oOU@KM1||n07zNn;Dnm;ssagH!(Uk2e-xJz z;`8Gtc6>N*uSXa(IkaE)?VepmJx*-j{J6Q1&+ZaxR9@fu8VBUBL}oURk+%JkrbiEN z(}H+?tCE?s6D>&13#&tqCrG)8?}Dl%$IavZRYs~V7rYvWxgeop99EpbT=SUNY~rHB 
zHn6DggYy+Mt+;UFi()>O6Q4M^27Ft8fe@}|FT7X;X^r7g%oV||=iK_GQ>(K~wdA+K z*%p|1Kn1sp(jJO+morFw+*BQ~L=kBOtDJ>b;qi^-bmr|HpRucW-Z4^(a5 zpa(hHE$!2~z}9io&njv-dfbg-;d_AG>T>tM;H0<;I$GGgcT+mR9bbRbNv9Av+G zS(D|(={osK&TYj<)Mf}rtIL#JOp*92lK!JLc+6>6V%%3nhsHOrr>lZN1WLj+V|cWu zs`;8Vcz~cnHaEJ%=1&A@5Lj@=1oE}sx6}95f{yG_e=e5SB`9TAJ zkJ3{UX0wm*&oG_3b@(kM2SM8y>RVA*w@%JWU;HpDGOlcx!Z)#ZUAg zw+PU@M2Es(4EIZQ;inlo6%RL^SDC%;b_uA+*X}VH5gmX7hp#TB>DZP-v${;*#mi|a zy3Agd0?-#hkqy&_p&*1F4lT^ZJf*vXA;DRhqIjVHw&$WtZ?|%&tSuwEM{_N}hY@_l zmc|X>QU6>eyutZD`wb@Whw0%sp^c?H0Q!}Z_3bYZ#1n_{*Gl;0IhluNcJLg!>Mb_o z@E{SJCDyeqNYA};S|7#v@PJ@>> zDFVi{G*{DUYXjKzj|016C4Kw-eqZ{DFZCd!*}>Fzp@MB^*4fj|=;F9y7brcuuw&Ob z9h^<-n}-1DQr9{j@8zLQuE-t;mJKl7QGEXOufKo(@hyJ(S)atwa8D$j{cJtazhlSR zk5&$C`2>HpR#PVOyOq`}-jVEPKlw)z{I-6VQHA|*&0ndREWob%l6wkc``H&^mcNAw zO!Dm8#z`t|lk7_AkC`iaDX)K(m+v-Kw83fM>C)Hg_Ri7nId>@}x+~x4;OTY~e!GV< zJ85544^{8EqSbPIl?1}OAV#%Ir3pv~V zz2VB%zSi#1b?Rdr`t!+OZoijtg7v=g)lB4Ps{pdQ+`RM~S@WZ>T`7bDYERCxJ;^h% zjW5fTj2kOa5{8h`wJ9};IV4!*tC$eljaTK_HPK>`_huTbFkE*h%h=f-3oXUX*WF7; z?fsM8YHnEqjov*%`4jF@_Ucurj#`F}3y6<4Cn{V4^(U9Ev`V+niTbL8aHfnP^4m`r zaPZ7I!s~7C2)}Ln|C2FRt@N}N9(uX3I%o@~x7T??#Y$E{d7Pc09%`irh^U$)$%6XL ztJZptOzUMrB+%REm(UR6V_aasZP0F35L!Y zg{hG1j^PwD)+N@!?w&W3#Gebj)dazqR{4V7P4kqv|oq=Rf;aS_ZTPU#fEFA2G}b z8&w-u_h3Q|+cF$-ImFj238TD%eU<~hdREQSw}mK5%@HCjQ0zMsQbjD}q0o-O!b;fW zWrK}PtLvCD1WQaSPdFS=IT>v>T4ai)mzY@NrKkHH$@LNOhz&`9Ud{pnVs zluT!7_C;Ul8~~-_=~6oeWkB8t)uAlrCm?`R21`yR*vFRI?8@7sQK=!NnU z1v&_aMH^3U*ktDzXvd?X#vl}7U&W=t1t)No#wMq;d7C!$595YWbGMAtpoDE z1@|^#<&+}f%{=G?+UEB1A+&9_y>a|vr#+Q;?})BoC&6yfXJ{7lf##V9OX`*Gq9Hyg zqY<4Y392!v>RB@@F=Kl(4-yl$4A~X~*-q)I!Uo79WjzZPI|ueu0|YNn`#7I9gh@%V zq6BUDy0CB13W==Cp!~DO!y{|O_l8!ezY67UzACaT(qmt>VO4_CPxAtecV&U;qg*la zLVI$Xo46c;G(bOdg~_wt;@}m_V!*d z=&q+$-}@Pg^&v|&jFdn%8K#*t=NGy z6eklBkQjLbBxszGOJP61F()J6U$j8D3|^!NA}3|-JV_*>ML6{x!q^E>w=eaG^h@`U zpan%#V{O%h4G7PSJ%;SwZMI79PC;j)-Icvqa>A{f-~`P-mafsTZP4^;Fe^%@x?lU>YuGoS!M0kq~RUK-<5~v5`DDt_izJr}YG8N~ zIhP6D63C6!!ZuF!Vijz<<;lzG?Z$$u_wd`mDyFCwT2UlFPgFkDW%XM 
zs_m*%DZxn*`RJkb?e{;w{}T0QKqRO0!DKIo;zC3EbhQsScmMlaOk1fK?e;cq&{n6= zmPq@V73XJ&o~R~z7T654NH6MDMnQWsC!B(Er%;Ok{n(1!2P}#dKj8ON9(a?wgNc$c zF^y+kHsQauEyBu_K~TGi*JoWKH6+@xVSCp@`1Vy%GqG>1J^UB}T0E-I87eUrj5tWV z(MfqJ)2O*@kA#>z|3ybbaZa2t0B8GWZ_Y(7R2|f{Q{MTgQ~Z$Y+P4Ib7Eo@c6}n6b z*x4(bPrL54a|abXP5 zxg<6KF&c!xMdV2;@cPlH3`czo1BZU9m{})hr?7xi`SGn`lBdlG-b@@K&94#_`NR|< z^S6&v$;|%$KNpdjon$(O5ZHA~H1Soxhggkykz9VB?t=HOZTKR_NoThE~jy%e46$ z+oNblA(A%BW`prE^^;?NVMMy-+IlB5TjH zj!DR!Dp41)U#~@+gA6^qiV&{cJ&I28PEbXD6{8MPp*)L;<47F`X_)SKy;92~o1+)F zkkjmaSpA@tVvP}vpGAHX>y)oH5S%xMNoX$jOPDFqLuwhx=s#Ot%?z;Bsjw1Ai54lv zM)zSlg-&_S6s;9>y3}cew5m88SLoTa>)N1~$XVJ$AY$5&WQ);y#3n*QJX>I~EMTC4 z*{Ei~742YOSFt*;P2)7s6!$9qnMZ~j-#${Xaiy@b!ELO%;VSJ0Eh7QOt+@!XFAVEG z>nLz2CEmMCMbYaG`}^2^W~Oeq_W2DXsb&+tsTy zSm+C95}Z#_?4Umw9{=y0D?wKYkX0-cB8b$v%{lGd`_7j6_V6LlNXq&U`SBCug(66T zc|pBqQ3MTm(k@c_{LH6C&Frhvmc3jC&XaB%`wu)$9O|_HqYE z3;kD{4|r&& zbz-WtGIA;{tO(OGr>nGf9%yVy2XZXRMA|k9%IIYN9hgNDbTZC8TOw}qpu3Yqo zX<Mg~dHj6u&o%!5b^!!*<9#xrZ0_-lJzc074` zaHWM>dTXOAWpsk*1J{qGWpWJbRu`kKoFiM=E^^egnz(8q$0`Ug!Gn7Z+US(uZf$uH zxmegjS=A)?cnPFs$qQ+n%b9hN3G6?F3>!&F7j(c#P0Zo;HA{x9B(C=`o~%SFqrgq+ z;iG1qC1MO35(Ct|6isPWq=vb~?K`bkdcumRvQg>O&+=F(_(Ph|6dP6g1xQ#Q(_+W4 zFS&!pw%1f`d~V_7o1E4qHkwQ=m66%`td$!+r-ey>)~zg-`3P~+(WSLo<|&s7heeCX z1UsBA+{YOZyB}N89*V|BNuZ+c3<1v>L~p{#X4h`hT1rT?`xX|Y169r2(PCY24;X&f zgd9-`4SbyY1g2PUiZ~Q8ZtCNKWWIL$%nbvVEe?il*3?S{$P9Rz+m%IR7yp5RG$aT| zF2cwB!n)GB7(JUpXuj`63HP?ri*efOvgN%6lM0j8Mpub_ilFRc@friEVBX>Axl`qT|~YB$_*+n^&L zWYWn%%sLXqG&alzkCsK8Y6*~+P#}qT8HYw`R*=qNJ5=lzVq=H}D#UaM{~#E&o-B#RtVQFlLcPtN)oP*Z$Vn=C7o7f zEv1#F9~P)@Pkj4;11p;&#_+teZjK=<;AcK96J`0B2&H9)<3{zcM!M|fIFr^HK$^{~ zeE?35%bL5rQmM+adC#tINao?%dWQR52_=I=AD!FlB59phmY9j$FVfYXMN1s!q(gOkzr#`Ht?n@y@^{bF zzCZo-(+E)&m3$xYsw!{sc;OlJM(4P?rw+^Up;f*>^XIVu8L0Bo4ow-4WWx#N&zc|}6O6ZpT-rC&?nhtQkCHSj%o+3y)q z!j4$!JNhTuSONbFB>c_KxdRUO82G5u^dkO(&H}2r+k8P?@=CEl578r%VGV^}^GX$q z7m&u7@e8`hD(G(CMsb9yOD5sNLD}bRmsn2@C!Cm~&U>97?0+DM6FT1OEvPn_a!;v= 
z0AoO$zn9U<&LiV8OVTw;q_1BFvgL7Vypryc6jB;Lq2J7L4uAS}3#hdjVYe@guP`kV zma+^bdH7a_m?+p7LT#l?uP)bsTJs z4xNlzL51JLMO?%D>Ges5-|${uM?BRuJ^lg_LqLzX*T2e(;?uVo13!n<_`P{_Vzw(z zcBrF8n>0MmXa`QV-obVuS8*rsY%?H|L!b04az{t!*RKsGx?H~=R2&J03NsgeI$!kh zcK5ta`~^?q{Eq_{(U<=E?e{;w|FXYxoZSKG5I)$2x56qXJQoL?OiX+@5~Rg4jURDa zry*vjBPWkuOq|lZl0L8!-hv2v%MPH5zzjSl8={zw4&9zGLrzY^PkZ`P?)T-8z`cFt zIG|dRO;GUwS_D&XsT0!!_HLq_;HZisB-c~pP1%oh5-GbF))EdWqdTDL!XzblAWlO5 z&TCzB;=nhqS~u-9F%%(bFXa?7?)5JEbf`J|Tw=qttY{rKlRB=)0rQi5-Qv=Z4G|bH zI73MB=$pgqH;;(nFc)~S8BWn*3k#d3I6S7>_8~D(rE@^BD0a~BpuY&bHFIKtjMZ#; z6o&;WAnE9E5s1P6Y!1W=@$-3+yD=119tuC--o*LrZ~$%1p2!$zt>9+tMTvS^cer8H zRHpfmYXmzY^x-iX6FfhBPDm%~#EUJJxW*UIIV%Bmb=YEpJHjznY3o{ek!Yv)Y_4XJ z-vDpQH?3j7TMkU>mm>2-(zhAy1j_zYN?(zFx|U!i+~xu{*pZkv-4`zhsmx*Yx-XBJn12 zruRkdVO&DIzWpwcxX_}*{Ow7d)M;=Uq&})+^5Bw7nox0=3mm#Wd0hJ{pu_`M`TJYb zjzR5KQ{v`eU$mll38Qg=l)BskJ~&>AmXMY1Ai)yc`QL!jqW@*`j0LAlRj<}09`>eN;JXi}`O*@w4C-wbUFK z`l3PLhh2SBy?$`vSBuLyyEp^cRXhIz+8en?hw+L~vPBQ%-8M=cE)p%#)Hv`5Yg-f#6`{(8_b?VQ!(3bQFh-H&uI4cKcv8n`@+jjhW1U$Y2%7efj~)=_ zvzkv2Zol<`>8o8ZAQ9|~Z4L*8EmLV8{k%;+;O;1AJI{%Wy{{@*EZ^3?$K=(9{T!sgrh{!eanSgxL}pxK zaBXt1!S67g6U)bATO+yB`tPE~>gg17oGwyDVUIY6hdGd{bn#bsipCb$=G_ioSk%xS z8Xm=+uO|l!(yJoi)Y~D~UB=xuiEcPRes(-wIt7Ok&LW8ougDU$%QE*_+r+49#EQ$T zA2uR!D?ziz094$>gVSR+Rk;Ga^Q<2CnfN2PvivMBoZ`SjQ@YgQu+yGxOYqF+&{ydm z#y7!k4Iv;2PV3p32KJ;obq);ehQ;v&UBR(#a$gqySwLf#hX!ousKIaQ&#pqGLeUWx~6+x`F^uIaXeN##^qo!#mXO`u$$ zJmOvUQ_lXv9#x~vFPRQg=#|a!4!l}LwL^HuSw+sfl|6cy#pZ{M@eUv@5q7 zrF(=#n0%+P6><)|W-&!i4yet+C@9R?T(XEn(@|RY=(`n=>_hhua{J-YH`#4paFKfp zcDctgve~s^_(UMs_*rxajl0TZ%-pKWW;1OGW_B=|&!?5)vpYptaM)Y$5KLLr22Odp zb!BECSL03D(){|1G^pm@s;}A1D44?oSu83t)a7`vm3-adqLic7;Serww`ToRhQra2 z7Dke81-sz;@f-3gZ69yd7WYoNB(=k)&mCrdWsi@1t}8u(cHq)PU*ovl^tpEr273{t zgWOzVx$Kn`r-NEYBu;S`@AkShWd0&4^3jC>C^FQO1NbT$@047I9LnN0cz6wpVxklm zCuU(J?XksdDUeUGdscqr9#QZcz3TFMzJ6LT#?Tx{)HX*OA3;BCU!&L@1d0Dn1~hH% z=ItfL>cKJ;zO!@s_|A(fXL_8dO1w0b#pm{h;Cy7aWK#5P~i$ziG$zbKep3c 
z+KhNTkS7(1)0zv#TvTxm+)+im;X}Tw*3Y(#!{Gm8j3|2g+8ydpb&ugHblYqKSQ*@d z;G#X|Ig-DNfUQb5&ElXWS9vD55`ET=Qry__EIdALD+r(0D#wif_#)iAszV)W=@j@y z`6CM`D{m=&9-JyQD)Ac|z>*9|Q?W~d+dUauc8*_~< z%qqFC4D5W)go8ezhHPl``hI=Y=#bFsOREJ)xiGC-i`+kV) ztRR|vm_>AH0ugd@IWBCHrwE?jTz>(NaJD$du_$u$}R?O_BGJB3*$yIQSRNDn@LE+b!eanaJBdM{cE1BCaZ}Sz~x;wdq_$Y6e>ceCl!nAl`M*gb` zp65uyRe7K_4<7HjIKAgA>O|hrJll0*J57({)3$PPaB^Oa!+tj>LV27TcYzf(P;)T9 zR3oS20(+>lr+b*p{-bPGV)mF@im6#}5l9PXEC(alOI;Z+by0fCxS0VB3_IlwcB*E> zbh`qw2M@%yA&m~GmYx669L8n#8s$u=i=&4<5fMt|C4~4BxNpx@nQ^q+ z>g92O@w+H?3m3IYKW%)JzSxz7OJaaXl}`@Hx=D`V4#?YwHP_}I2J0YHvk@&mdHDNV zlP7e&ZLmIFU^DwOOcx%SkQIA4?%UfddCcJird~N^Vx3*mRhC&LHa?lraGZ;E#aS9~ zwA+80&4bLYVr{#_Tw|gC#!elwZ5dMcpjWNZY@QX5bdv!M4;hV^D&dao?{6{b5pK-! z)|X`mJ0Kgp_fRg&#U zzJ;nLI)p;Bh12J~Te9tO9p3(noNjXhKvB00?k0B@Y!Sj%AJ$V_=wT}>qI@ffJalE^ z4E4xoQpnK8hQZL*tDTluPQk-p)4s*J%V1z-Xu!3}G)5O1tjZAU4lc9yB|JT5t3r|z z98MvTR2jku9-3tv+AR(N;TyazfXnItte7_A*jjT34D9z)9a2BU5gm$$7DZr|sYM5G zQEeMD~; zLA7R&0r5hM^p~vY@IFRoHmp99hd5mv55d6)^hw>2r8*e++oz|;`m`&Q%yW=8s3zMA zDUT#=K`cm|XfITV4&)ByK*q1GEegeqM$DN1`=Z`XcFCv`;lw%P{_9s1rPVeVjxJ8$ zCIkB+d!b}g85z8l5(GFql@+R?f0nWSM)FUdyO>Bi90{IL25E7yWBOdFVGBVze?JOEDTOb6DWr?|&{0|kekNa4KKEmp#7Pn^ zwt*`5uuFyz&581oJ|Bh&D{K1W9d&@#`xlaMelSa=Z4J|7dnpxQ84h>XvuIZ1gXR}z z;bYrgWrq*p86BJ+A4v|6Kfqb}pBBePrsam~$+YCL|Avt8l_nF$JMIF!onWknUM!@? 
zW=qM^U5TYseO{AW5W9ISiPFX66^Uls<4jUCr{27dB)huEvpC$K0;#TLIvf}#{#Oe= z^89iy(_#`pTJ1HS7Kh3kuiuNwmJBx-fhmqqlSgH-@+95I9riI%D~Av461#Ft@gSA8 zSGJLyCq**`@kNV)!c@|J2JbFTTv7pP%aOBQCc0MlfL9bIr_{l zt4NrRa+8oYlFYBLVP}@X3nwmpXugI5mR)?vU@Q2K$I2;m6 zsO&Je{7s;YEje7oc1FX%P2V0%`5C4XZp}b;DR+f^2ryih{+w?M#t7@QI*kr(&1-bl)jSP)^7Jq^5MZM z`>x(H=M|a-QHnd1?*WtgxZbDLK^!JDv2ruq?z5yq){4tG%U072hiQ^wZLecjI6-6YXLs9&2B`oZ(XdX`4$AJY{V=@0a?M z9yk4|4jLvorB4n#*e5kNNzLJq{>Qo?J-WS%M2-12`!^RxtfJ1T=FX7}=~2Vx+jUG2 z>mNz@YZV8|inj2|4@Pe!Q(XD^N{yjc-WR($TBp?kutqH^s;%llh!NkHA4S`BV_7qB z?0&55@^ zG#Vx-nqB%BafJnsDoYTBxb}Br6FNDB88jsG0%(o*Fv37Hb_c-80{xl`++?3^9m|%t zqyO_?@6x~QgIgh~4-S4a!o4d@4?OW%+Szzi5HIq2;W)%u%M#+mf5Q)3Zdx9+V70_i z%Au-xv|%O@GESF$AR5V$CKoVCC%D-hXe!dHybKpG&UQLW4!b~v!&FWby2|ln#sj9E zi)X!{jM;d*=~8iu6Hkx&;D*v1I$?!|w&8FL`NhDc$J0z_iw>K>)X4H3aI570x;*{) z8Cx_Y2je$kWPR&mIC)AQL3q078%LZTOKpjpz|qa+#wAwBf~AH1MsuLBDS|{U$)T38 z_dxdWiBW+7h5Jw*Cy;FlGTJ!(@k3t7`P5sO4tz_|`*E9<9UK)C%IX3>$;3j3{>g$q z9Oy{rtmzDRu^#4{HjhDWc@+l!tvcLm)}`EVv93FLxkA=BOB(dMW1C7B%(0Et*!-oH2J8>=jS=$-p>0yK=S^^Kk+(HH7-cu^% zV|r{ZdjPjd>>S!|+kGi#7YUY6s(boEK*PZEw{3uHTNxCOuG`#U*@MWlmF&v8D0tX< zD2%uF=;It>SJNqSzM0HNPtC)^bJ)%&+(7j?3nGI{t-jtJMxjF^c>tKI)beoI5)fT; zS)y){k?DUR7}{{swuW-t(DC z6g!U0?U@huKfo=KcC!up10;_=)!J9V!9-Kr*QFGihv}}(BT3nzml1t1FCab+Pa*G( zoQRaPIMzdS4HszICdVfSZ=t2;P8)Sy@NPW(<>Z~u^%dKd8AU{T_khoYEA*T$ zzQ-cBWcW-V>}gr6r{Y1)SZn)wmQ&FeHg&=wW=G6aTCFa;dAqdZTX-w&?CUae;jg*7JlFst zW6%S|UpFK8nZ=x&3&~^+u@&suWtJ{P<6(3MGJ{tlxCKJl2K5^q#sg_oBgNu=7ILAJi6*Z2Dhi`xSalg8tvY-rcpek-F8P19TXiFHE%SR zvAvjjXAh`U!DtS2?W&H6nq~a)b znZ4RaU34kj1-~jg(Z#0tD>onQ{(#6%Yj>4^N4G;U5T3m;nH1+A&43hl58j;<`n8X^V&Tb*W3+zvCH9OnPoc4Ha9*+l#0>_)lM}SV0bZ zOyOo8+4WatG<0jqhD*C)TZKgrNJZI0JB&b0ieYzHJvJHE~BeM0{C+lBY_ZHu1M(tlpW%&#Tw$4$I33xfsS|Jw(CGyR0HV9xdm6H zLFq0ykN|XD42Fan)Im&HL_R}o)1?oy2hh>Quv7gL^Hh5Ek!uR$?2@O$3|E1=-D5GS zp0axcJKCTG*UawjEw&+9@L;j6iyydZhC!+>1blld4@_10siZm%TAFKnUD|J76|9&& zCJsW$6tlWIkY{bIH9Fk8_Sfaanrl()I1eb|KZ@wh7L*-eJkOzM~S#{Zurles6a~}9&;-(m-i@ws!A+uE;K4} 
zSmf$ptE^ikUg%ynn|1p{b>Ok7(AVw)Tv|DHrMN6om5}7o4oj+B=Bh5CHz}=qc#zpg zHMek-5j5EayO-mc)?9jRTO@mOIwtHt!GQ!L{po{Cbgf;qcmxC}QDEaP)Dw52yh^K4mk5hpo+%;-u-EhlkxCnCx4 z6Bpa^wT>}7e3-;)_3^7PDYYD7*@#rOI^1OlCj|N)(aq1w&LegY-(chYdzc1a>te5s z{<_xg?}y+*=Dgug4$~81%;T|v?Z?;T5?-DbPAAXO_d9ubM}5BjJ&dESx)_ z+O`je;1n5RlW84nqIR@NjAwin>Cy`5DxrH9BTYT#b1>`buit+E^ZPGJJ9a1)H2_}u zbtZVV@z&Z$2X?%@pG)!BZ=xLte9-qk54Hb>UP^XbO~MF~UY^fM*stU7@RF5GFgWzx z_T%G7V>--%Y?*s=ND>h-={TQt8%AfF_fb8DG-3+WIY{>_31cJ=UeCM#l>iPfv+AsI z#?o8%z`<=ZgAtrCIy@IRCv5p*S7d?;bbg%ue zIdn0d91 zN$t}^dAgJgC_;^HCs9rgsoLVGc8U(O;e?3)fUWQnY-b1SY~g<5z%yTIVs}+e#5v+y zVB@jdB94j4&KDJy;gPuYUX!k+n{T2uJG2|Gq4z(*9$A)KZ&vYz^K9Su?KE9(d%@;1d zNn-t%&Y>rP&Q%WsM8TfpdarjtznDF@M{d<(vtV_6a=+mBa8NN+$;o3Nu})k6WJgnm z)|$gSUDQFGe|*v5tkt2=U6^Q4*H!3GQM6w*Cl4x*{b8F!1iSQ(9WKF?!W(aLR9C#A zzG;1X_Q+FNNSL@Nd&0_%OI5IJM_oJetZn`&=4apoj7L$nG`FBKK^4+SOm|wK6HEKrG{75+Ml-Ze5RY% zj|ZO@hb`sL%VuR)>hNWS-U=_zmnt&kQcsgE&7WD^jCkqD%!1@{ZTX0{KmWGI!~26; zuDr0D%l`x9hx|Vwg36zBumzs_m|QPEO2#UwJv4vM5A$3ea~wA7Dj&o=Kk7){@#{bI zHo-f=RRU!lpEjJG20!5>BsuXLdZ{ucHmBbCN#`Rou}WtF{NU$rwU6BxJvOn+4>!Ki zqquUy#j+XM^x2Q!K!-6LN`J3;W;SQa=D*-_PwtKa_SpJSk4&?_Q_&;C<8N>?9ud`q zu@O8%SX2M+HiRKo4V>J1_?ZkV)Mi4agP}WTAhrz#(0=f>n z*G7WmwX_p)NBS5#7;T0cE)0ja?-=)-SBh1wt!1AL?J=EsempwMp_15O6mx!YYcyta z_#AY{J2nz2LInQ1n=*Y9yQ;kO^DQ+h%hBrzQH-hR4M(^z9g3 z^bDuSU$AR*0N7Pr*Yvm$#0Q>{m!_JrmWR`b!^d$luM~ddhb{Ue?k`^4e#c)>}pkstq3F62-g+FBl$-76-a)1|sni4MtyTjrI4l)sSV z-HHd$w2*sePM=+v5g&M;iWnO%D2h-72bca)pwiWYVIZ{e$%Vl1`8N*T6!}eOotht^3(-xz0O1-Mz>aBZh4#ky9Ji_dF5QdZLwI1@#m-;ObKq&%*#sXvcGrfD3BHRub?q=!Hj&8#0<|pcx%TEN4rvbG z?meFBP`P%eC@;K-{Pf#BmRzX6c9|8M z0|sPkBYSXZ1MKDL!mAYRuZJ>N)S(=sTr4fu@x`eYhY_dQBQ=*Q-3D+y3M_Mc$A_m| z2`!fXblAF{=4ZnP1!A)wE?G!e; zOWh#Y8zCo;qgj!Evd@gc9`VHHK|!%cgnR%{R~A1<89~8i6S!qY+v+g8iT7cF+JOo! 
zdEoz|{`VokxtVt6#hJhPaqqJ-oOtOXmE*C%hWvOIqr*5raxHx3!2&QI9(bP(Go=|l z_JJU)dCYt4Qrcr0AX2QFhjzuu;Hx~Sa~_WYtz|J5T^{G^KbF*5)@lvP+A(ceD_<=C z^J~-1UZE^DU@4Cm>5pl`%EJhBqVL|%UZFfR!fQ@X7xSYq_^IXt{glV&gO8yzW$ARk zEOp%~i`{n1YHCGUTuhb4$>Qbl*=)q;=8+`6J6wrQ%Vu&m{LmWNe?!Qt@n@yaMmr^)4U zx-dUDeVt0mV%vr1GGtDQ_vXm+xa?<>hg{%!n>KOy!i<qX%1y`1j2Ft^ox|$qzf#T%k=+Pg!kxR>B+spE}@;5u&0xqToA;sBinGs;{)3+iyl3xm%mwAO_eW? z8X`TKLt9q!smp51kn#o1tPgbf`SmhSrF$IEZYFux%AaarrlR_N-!Gp!(WF}M|O_~|QgXSbkFmRg~f#YWiWaVNjx zLN7N_^WyP*d**$4Q=3mZsa52W^T{@<8*Pf}_i$7GQIFbaeKW?!iCy zie@Yi2X*w52id_(Jy}*qW0tR|Cy&|Nho~{uWQwgd_J}B4_w@71(gp3LP({>8TJg3LzdszFX-BWWN^aO8j(zm_uFAkVI(#{T+j+a_ zYl5e-I&}_tiw95FUcB1~fxaAV7u_@8Si-0*H2UimG2_^&{_xvA$+7(*>*?rRP@Jd3 z7^b6LRh0BK!#8hx>=by>9!%u|yr#A#Daa}f-s4wwm9*PwQ~6mbCW?oM>EU;k-c0Zw zJgZ#mbeiy*dQRr6xoy@=@4(*_&=uHw{}sQRtFqsAD|`B-D+We{XPC{~hpR;!AQpbM zD=iOOEkBc}2@?I9ugYC3S6s$}k=5~yrx+J-7j0+zz7)T0p|_vV74l&M=RJ*xb@PS7 zJ9{}T$@B%iR6XtKlfE8#`?bW+G0To*#(+y<6{HTYzJo#Ap)1>69i~t&+}5T;pCgLl zlsJB%di>09DH}haZ^m4e{cr+y&yEr?ChX}iN)<8Sn%rs{5#*3*oA4yhN(xF`bZ?12 z?Clo+_WPgTe@O?V=CFE%+z8$#l9F5s->~NDVYOL)5)LPcqr^bwd&dQ+(m8sk4V1lM zYaI9n{4$Z{@Gw|ecAOOqluoVVrdwTn@Em(7&$gR$gV^dtBDaOY+S(%>e^teu%ct2q zs(Fz^TpArKhzWii6GJtY?14h&T4YjS*VHs8 ze!d6>7!lT#!l$F0=#JVzAw@^!)q&Y5;9@!L*TEO1=)Gs9U5)^qTH||G z<+O~!XIyqwhR*|SDA?)S*&w+xO#KiCqVMF=8s^&!JMHuKU##};-Y65x8@5u5RKjrtwEh;pA9*$1x? 
zomz$;_@!v}9pxe2)}}y%+Q4&rk%!js*jv1qUiq0|Q7S2V@cLJI`7TH{vChm{mG%{$ zF;ZF7tEUk3`=MqGkav1B6r?T6X24om*+_h{PX+}?kzn0_uKp|m7AS;f@ z9{;BRAY%IY-=iCVkr>CRXiz>b2Y2anE z4jS4~bP?B3;g8?0LXlDe-%L@^P;Y!ZVjsj|;C789-QOW85Zq@EQofKBVB`ZOh}`jA zh5V47j|!0U7%*)AfC_H!e4J_XVMf8L(k_ePkb3K*f(SCB4Xpjr;X(8Fw=A`3X`Uu# zo!c%o(SbE6uBSLTBUBs)F*@;tY(8=87}`=J07}c(*5VuOKQ}I7QaYXF03wL0K4rLu z_8P~k{V+}7R)h%5a^O~64DK4|=qp`j@}PjI8i(#(>3dwUnEFNTtJDJ*1Btg&TyW81 zzn|&=ESkqc#~Qf36cJcL=AvHfObD44$X)ujRoKf)e-3)wKMUGvt^6Ps!@<}cs82S5 zV9oknCWt z*;TtISE?1>>OR37Dy2cok{6i=h-ao!I?^Za^Obidl#*Q(sF)(hT9>uUXL+}#R&h<7 zZJu9j94bah0>>Z-FkHPuabzpNTf_cBVzfZti?+f`oJ45XW5uD1vXvbf_r_|yQyI9< zj2v|Ea#b9{+uhAOWC`714cx0@Y(+{LIxDy}<>n#PHbZsbmleSZVW#2Gx~?R;>)?Pi z9j-uoSKVHHPimc}nE7A6qa#A{bkT{;*bD@QpFvItl7k+=tiCAi}^iLl5G9Km^2`DKAWU3fTnT;jqm zraK^WB0H`RE>{x-0S|tIuxh23?9N@Wn7?5@84HsKWy#?6MM+g1fB z^RrS^^c@qU7v8aFIfKES;8mx)cLycIDiP?VkaW;Hrlqn)!)zfnn<|5ILq*(3#xyO< z)A|&{#C3;>hSICM$$2BDlqe6sI{4)$R(pU? zd+nGAZDW1h$xjJ9ih~YLlr@u+wCtZ=Ce}9EvpRT;eJn{a<%LV~S6s>7zUl_8b{D0y zVk2QEeG9Ll^2Tddh)RyUmR#p(y%jamT%k=&QZ|d4TbnSjO>TQ#7}Bd=oK>kJGj?xye|uV}gShwo!kq2Hmklwd9AFz;0)Va3UgV0zq zpaKx5S)p38t5km$8an9q z%C!xy`A|n_sUx?+Q+>j0g)d6G>T9L(d`^42j_7g6nmG)WtG#UFF^ZO&d;74cMf+f6khxg5yq*VC?#W0V_7$6xoFdX+BidBAj4RP ztVAz?3wcE@6J^2tEK(Wa;2gY;Mj}Ol&BOOm8IeW@ip@G#<=uPNZo4r>>ncNn9I~T7 zhL<)+EZ-js0gXGBUOn>YB05?U$u{U8|(_1?~;KG9SFY zErb#;Dug-J#G~@;5hE48sEFsZ9XZ{>pTIseQ}c+N`y5V8sq*_o95;necR&B>e6;(V z#VH^0EhUD4x3ai8*no*5gbs@?%9eXnhg2-FX6O~N(>-iIjtVoR&q}isXU&)_VTOKk z>BurHYoAIB*Tj3q7XSPEUq6>A@Xd$g$OZXi!uB#B3~ltx=boFLiIVRbwAx>=YL3wz zV;iwLuo1ub-PBD}tqxHVMGst+8rza`JxfTp^;ZMQ6Wfc7Y++ zefebKtR|K-PKlC*f)xdWifS6vqV@@MsL{uU3hBIuPp@t*Y?7(oHnSF&Wf?O}l51~O z<8@K4-x1y8SM{^osgb+(TGxOH*ShQ{b3MP_G7v;m=Ln*++rPR(uv``&P@g9^BVM{li6fUI)8+*Gvd_ikSM%kSw6zwRm{!m9?ZyF06m|=Z6$s*$x2- z>PuQ}Si|6r`59?}Erx-2kq-2p5i6u;xI&=NLf*d2CP05m;Pj(wC}y1?n~bYf6+*c)-E1 zlQl9bpRRgatPRMKUK?)ou5Z7~IN1*_;Iy+Q&0J1n;;b^VZtudVr;CsTzB<4WaMcubd}T@#$A8^`|F|3^$5g0c(`^_ zKDuuX9)3RX_GVAlu-f6paQJlMN`Cb4+gg8iV-a8Sb)^i)6k?V8I6_v<+; 
znrcq;wcFhUdh&}vu;K>NmQdkam$k+W@B+E2yy8CiIUq(S;}B-P=N}M^j>C@Y;Q7yz zKiH1gP=|N7IcRIO(N(kX6gz8Ykwc*JXisfXutK47H2ApZquMcVy~9&jS(Zl$lC~txlI(L3mug?^$z8-Cj+GRjVjKN~DWM%`V|-_QTf6 z-OBvLUW@jWs+pgX;)b2@3e@6NdJw31!0u2`iV^I97x;dPi5t9D+IdS2|KwPc;C=Hx zN3a-1r1zhM-eLE;0f;Jn37#}ZgXGpQVDn~AfNCAEt~@J6afOw^Q?$Qp9e6(}ex5^j znZ)%3tJU_TczUJrC20{*ONG$t7PBBcqi7qV9H}yVF)vwMAQTf;pEQLI3o(^7og7*x z`(i4SU#p^?;m5leB__Jo73r%$6HB4jh(%8*V$VNuD% zltQ7)QPxT+$4Wa&>Dkx$WJ~DT=x){2Pg-KXs`Gu~k*ExrE_k-}n>r?w!`$ypIho|t z913BZ)X{3ig@Gl#&%t3*Z|^C-qipw1I`?~3(B8-yA7`)sEW4zxVp<&U$gRDnt`Q~K zbQpz*SvQ0w>pv1xbzs$qW4<*$t$0Ra+B<%~7@{3!y&Yb%uit+E^ZPHACzmPEca>#S zi4ER{&-QJ&CG?cyFhRERgJYf^_m%X*8?#{mK~SXekau0@u=SdP&PO+wjAA%VTme1{ z&fTQ6HT%Gai~yZYTuLdpG8W8cI0RIgr}>VU^~0{pW(waHd&Ar*!BnYfIH21}aI+=` zK|g;tJ37p9A@iKUd;@zu^7=ppLGYqgByMgaV+d<+%Uy{{@QT-AZ1vRPl6+1A+^%Er zx?8BRaA}}JEga|)imF-Lq&MA)_EXbE@8YAlsc1Kx!@@zfsM)xz5cHJPe-sJT}< zHvfrg$TpgceWFo;+F#{GzkUjPZ*+=xWOwVY{?8R!uq(uq8kXR5$E@ye{XS-LZnu@a zcN-=pyp$)698(sNAo{@R<5hBWTdq`pXp{10!Kkr{dv$26#Br7|P$Ai(+;T!7#&~=u zK#J_(A|h_VLg|WUt(%OzEm6d?I{0Q)4Rgk_PqTRKiLV=_k;j%6zpd=Vvs8K|+^mTN zhgK5SfZKk%)O6xk6fq9t*ctZ@;is~<84jd+(Z_a-fr!&!Vq(veH2I{^Ur{8**zrLr zcGQHN?wvwwo@CsL$}VaeR5k6xFB|Hfr_$r3;MOm}rY~4xBMzI!?+^iqb_Zb2KFvJ1 zr&YH2_%u^4G_3>&y|7B8ila6@mR0m8%D_>Bm_G4gY;N0e`?MYnzFj_5e&`5+Q;i+d zXT@DoDCMk(_+5w-h=0-_*#V;L)^Y$SY$4{mBb=4wo{?zSzh4|CIx3{h)K;8&m#V<434d=JB7DK zP4zm>;k+?9Hg z`{Z*A(;P4-ikV%q;1Ui8d%DjFmY99;&bhYxs4y{?P3X;8sr~o2{PVXB)w6#6t=!I; zdrB%@6D4#X1VhmBqe_xM;vRT5^QM;0Z<2Lj>siIt$H-4%F|6wjZUvTg6dmxs-%*03 zpQ%Pd+9bgPA*Ss_h~|M7b3FB&xoqCftK^&~F^NssKGEJ&9u^(98qBV4CRa)b#9i_l%iB({{3r4q8W1SjNe{IToe^oizD9a?B5C~`?RMiLGLZ6{rsLQO!oRw zMBh_x$7Z1=@>MzrPCW8e=HjllnY%lSa-zS)VEYatT3Xgll9MF1{updk)5f{=s&;GLug)!kD@DxSgU+6gOB`M!=EC5^S zmhw_2< z@2yX90M?Y$q0^6SpPyVibkTTkjx@O8EH%;PG#qXhd3ti$wWv39cgQOdplr$x;h^^U zPlLM7^7p4xrPT5bpR#f%YH`S&5EnZ;*?uHDMu&Sc%fyU|-4?}?q!u0@mzIhciaa;f zvWw)-HLvQ;Q*qC062h=hBW_ZZcS;Tp5usunLG{J&9<0K!31@_E4Xz4HD_Cu!!1jTi 
zOxO9KNtR2EZD2v9`C<1WCt*sU1k%pZ2nN4%An9>e}+zQAYG%AjE^?X38<>B@?^thgrakm1s`Qi@6fl1nf93}jb+ZmAEwC`f?Nwv zt`+Tgr{;NQZ8my4=o`Png^dU0IPb#;d--5`wrUvI!v8%Afo}2ef46BU4%xdZn`dAW zrdTpFJB$U)U*AsDrd@P_feUn{KURRteXGi}4>=E;7JC)fHfo5<4froI*uG%(){|KD zc{&u_>JoH_T(Yjf1S3{1N*vv8RbQd7jrrcgiB9o0%UrRw6Ms*e#prfLF1r~_wa$f@bA zNT~Sz*kiARho(~W$K#LHwi?qR??VJP>=`JF1g65G5^Ta7>o_x~XQZj6>C(Wh78f}2i(&PleD$3Bxo z=Z+i7rj~+#+uTM;p{N3wHu*19p;4= zKM&Wk08v1$zZ;;o%*{7T%|LJ%4%q%&-ND?{UhaLQ=l~trj=pU;aI$Uc#;w|^*a^D+ zwlIA8s*(d8%zZCa#-&$>oHmP5?!cQl%Tgyye&VPWsPRHs#QR}yx|Mp6X9d07mSuH7 z@zt?2_;Ad;Wh}`FU>HiugPnEMg z+>3)lz$$ISwrIpL=E2evU`6H504t3vlsS2dgJ&@b^#o7c6AqP~r}?e&I^GJhRfh2U z7ZvSa=KzScPjO_)M(`HH7+&WI!9mRl>vQc&EQb*>J6)6;+~{tZ_#1?Ya%ZcRsuh>Oeo+Lq=5fvF!XNps=liO!AJQ8IT+ zr;f9E-glKP45AC@teAZvLi)u^RUUIKrbP2)?cYwpJlkX9tR5imf0;F;*}FZ&ui&5aFvu;^DSrGB!~><#I1jXxXxidY>v$-Q&rLY z9b5skq!&4a{HX9InjpEaN%OOg*eZ9-wR_8+lZV-<&GRoC6R0|LBDQHUl@WYc3~4(v z9O^b~jX%6po~0qmIOSV4KO8=?J&fWn=v8Dk4~`ED*)Y{S?-^PaGDHL7-`!e8d!qD}Wq4uv zXC2n1eO$VX*0q^YfamdEHYn+5b_mR|KWz*i5f({gGsc{_BEl{^TjqzG^zr8*A1aln zXDoax;@u=(587+T$?jUj-8XE%_qr}b?ne^KCZ2GDrJxBU+Qtck^noG3RqVd405r{v zC?fKqQnm9iTRn~D5NTk0J&oA8!>2fk3k}N=^Kb_jYTJEcP9FXx!o>zP#`}+qTJU;$ zAwfuwJabCAec*(+g>dfPtqDjShNuY1RJ^^I z!b!SzGm?`Nr|{Fn;0Ngm$!Iria3!&4?}i8FyLq7UOAni5&c{f9$gt?I(9d5?Z;E&hJn(@f===1#~YXtV{bqq;=sBK7J zfL10xYE%{m`|a1Jj2Ux=-}Mj_?P-(Wc8~Y}xkb!FA;;+pw3&J;p@qs1Wo1fLGy4vk zkBf>ed%%M*J*Y41sleUqWHz22jQ}jCNBjrPLtpWs@*~P>YyTRu@Y|~of&KoUS==c( z;a4xFNSWRz!i6T|I>829^DOk23yyj%^n2Q{U*kK;Zk0oJs6Yfd6bG*?1@&yC$q#r1 zu}oTYD~3b&xSi~8TOQ%sn;+L6ea*?AE^L6NM}1&ZQa6r9==j7Di)YdjQ2E}+5*$9R zfN&~Y`qG%DBv^fHZIwN{YZITDMMN0#M%=7DcrR`%*v9D*GHv<-oIYW*EOFhky?KX@ zP{k_^{$=fgyG48+h*jBPuW+vNIOv^Tgyy15jbDR8JzsEzqeSA7RtRb~xV@Z;x2zyg zR9>B_XvYYTGKD@>j-82B6H?)~tGNIAszY>pY#?CoiYR{Bu{@#hYsIdFC}GuM+r(8KF4f)xWQues^+}Y z;Q+HGcj2Lkm-Ha*G{u>J4*{)L@m))pysk+i z;&OP*1I;L}cKdI=*M=KdQP1Z}ca?3JRC!S#3=&dt0HJLO@p!~aCOs;XNZT~6;mX54 zFyVK7w&#pns1$~@R9IL!<=OacVh#dB&G5Jvs{a%!uCo`8V5{U@_kn3W3$X(czfx{T 
zeWANa0z#P4BeL;E;UMA^=0?1bSH5-GVQ4RSp>1$@Soom_&PNh*{KDV7eS;K=Nl1mn z?bSf;<4&^?S+Rjvl>(Be6aRRRL`ajwW-C*M!^pxmg3KOmz11nuBmOFYn%3zNEUZ28 zKC=k^Hl;Tr)6n(?%XFY<84_ACR$(B4U!?ow>Pk6%|xeM zm%5I#CL^dz;WdHKiX_bOP^>ivFni&peHDDZeCt2>Vi$0Ell>7kDKpyTo>?rQ`3aEaM7almZh=BV>9;+Q&>QyuPzN z;``t&wrwfSf4Myq!b5pQ)4>OUeOWt<`AoSeE$uE!88&irQ1=LTJKU(g%Pwso68{Bu zwz|V9a6b6;SebW`e%!SOJ~E*yk#**GV}*3u*EDF>J5O z+AyV20G+fadmqmW3wH`|Y%PK*5v$oj4JrJ+M@|mMnaMM1lb>fIi;#nEB9S6ExDO~y zl23@QRSL-mo>hv0*>qsr8om+^RNIU4xl5;tFT$g&ntS%u+rO?q|FEMbk$<6lwvtyB z@aXG-V)PzAG4XUATNiH^$vwBY{s&YxX{4{+@=>|R%hZ1ILF8MwC#FEW6lqk}GI_QF znagQR`nyG=fELPOCQ9ANo|Sx4)e{G_N4p%YcJia;ZOu-^13Tb*U|O*b550gx>=kZN zFybe$w3HKjAHDDckBc(UHTU1?t`lo`SJ06nU41gcZVntZW22~(8re;Dt!eWV`c`Z+23EYCn@o$& zXccaG_d;0}>Ak7mcxz}S{_*47pOiK276VRyy}(x+yo6L(26{0U?X@%UvxNgTJGef2 zaeHn&(PDqkSrY7N*aE47oSgy6Hghw3fhD=f-k9*wcKJCPdA9bJ%MdC5O(NwHA~%Q8 z38`LI>gT7x&Wh}dvy40_PiF+euFjsJE zB;0@5;Vv11xkJ!j%y94Eejj1}Mk)h)-Djzk&8U8n4p`&VLZgFF8(OH9PHlMYG9|O2w`vTBx1A81tZQ=(k@F0vqc@6Q?6Hq%u$WR;YK# zA~NeZPEyjY`ITbaiqn<`tr4?;U1p^dTro+^6{20Dx@E7$ob;E|g!Bic;AK~{_8Db^ zmOW!n$aiCwukF+hHzt zQD^jxql~v4=xM%em+BC+0AWj2(iL1?h*%Xi3)Lc^xN%MrEE*M|t(L&9BJ(E7jdSIc z6oL_;ui9a%P~xe&{UO#;-^_OohiF}t=pC){+cjs`xBZlQg(!|>`i|Nlj4nk4TdG9F ztREYN%5a^l)F8;TutyhjK$5@E)ekyrHANLvRdH}P*U{M~G9TVfAy&3hG_kTr44I2$ zO9lYENQ=yt`+!Z6r2uBv#Qw&GcxaA9MflvxD;S)G#q; zPFN5e5n3dQjE)(g2v53&_RItXRlS$q>AfttGXrlT8Tg9BTP$?r3qBETvP2ds1>^0vT^hs^1PYo% zhSjHibLLVQ@j?0SXV^0D$Dx%sNV_ZRxs_74ZO5``8PSBusldL_ zYztX|@{VQ;HU&hpweJ_x@~Uj22w`#=EYAfG$`dNUcVbJ{8ohPwxCAOb)`|xRzsmtO z2FTv6Uu;9AxPd~c;(tMe0<^%ipH(y|mB|s;X{oefbfemVMf4#ZeGxE@W!`U)3$7-r zDfI!Vhyuc%^f2Fh2tn#Y1f1J}YkQsH(Vh^0mKoV>Y!xT=|F$*S&cp>n<}Sy;wFk<% zH7^T?rTF?L4W%g8OnPBd>V+X5&dRN-ZqRWmP?e|<1UkSL!t%tHqRME@iRv_k?1mc~ z*)|oK5=_*?8UB7?j7hK1v+60}Iy^{lhogfo9bZPo@(l4wq1IUvKv~^?Rn8BwKoCS^ zQQ;}DvUzOubuXKZOBaIP3WjTiMl)kIjBrfqv^o5)ZqIc4`H}q-S4F(rk`@{zJBg%S ztYEdF7V5I~-j~}vu-!Eg(pXi976YD@Wi9^cSCM01D33+8{c%hRL|MNZ>Kuf^5qfut zVBOda@PHGxgA>#-*rNV$UbV3xrjYO=-fVS 
z+o_@+^f`&_ZYD$8b$8}2$^|zq_8l<_g>uB*+!6-;RYgY!IBP2C|EM%oq1)MMZds+W zO6#HzmU?Si0@ap?GBNi?H9gACixOm>l^6+I)GCS@te>mmX<`zn{Z?Y0*yK#azfgBW zdE~ZA*SS#i23+6T&0@-E)bhuVvrO@5w|7$$FQCm?ivv5}d^X>uVy$!Tt%{3|7++8! zS*1d0tj&E&)EQ5l=?Th(pG|vhX8v~81E@4J_BOS{2}?@w5*7TAPB)Mk5n}?Yv>>J1 zNB9~wJg^YFUiMZ@wH&H;;qz5s}!My!Tr8uY!qsM zZQC(3tCzEudqV1yB%5-bt* z3Qp0pTUuJfS>+*hr>g5_wlZ&fMm~cxQ6VINe{b?dd})FzazQkkjQwz_%SbY}s?!bx zXc=-TZ6NFRx)OE(R`4X$*>xFaP_foq{q|Du*hV?W7Rq=?LS+&tKUU0y85!i==FM&n zTcjB*DifSKB?$otC&>wsZO`xou$^fl#L0zwd4|R}m%46$UYp!PwPB*R0J2#c$g2wM zX|!Nj=kBJtw{U@5v*3X(#>~spLm#ZtC%M&#=*Q>4>6Z4H((o@As8GepnKCpA>%Xj` z9EzJSDjJ%yku7s1#ufSx2gGj0Tf(lgB#V7kg~YwT1V4Df54tSG@ghs0a_YHZb)bzNz@fFM zfhx^D7S%FE_av;(WV;vXlu+OyE{1N6QQ)@SZ;rzw*Bw}%PK9kgSc{7KF@**g(% zJ9@A_3-6IJsk872dEMwE+@_`8(Lsnu>tyeSPQm9S>hOCk(d2&rt~PvZw;KZO_#@x4 zji(lRg`ZVO8SV+1I{NvBe0h60k=~wSU@*}ACi6Y+a{xT}>9+K(mWs|71tTO_Q78)_ zS`CR1i^whtY17lt6(m{j^PZ4-^}eNq86LUf=8TQ5Zjp^s1An}tz*ck_$pL3iaNEmf zg59A{5*<&dB$tqCr>B)jXuJmrmUqAXDbdb6tLWOumY>q*Pg;wMxldsS%CoBB*nO!j zN?FCc^*&-g6B{h`g`k5iZcR{W13ug8H19E3Qx8^vA*vrXOi#pXL$pIUIOMxr_Yd0$ zk(wAnaQFudZa}{E5D~&P%4Z=bdaa4=Sq8k16`qP7uJb&`M5nDLGS*X=mBK1^{g%;r zs~nKi#;T>OMO0X@`BBrzQRwd$tq9GwfzgcKZWuFZ~iqUoOa5ccX=$z zcOtortizWIRM??38A8NnKnIDb;~uhroG{tqS$L+9ggt%xSX{t927R>BiIwP3tC%ez zE>{jR*;Y7v1^?DP+A53{Kj}XNd#$Lc+MIu)}5RaBY%m8?-97`|<==p~!k| z0~d&&8@9r2uRWDorf{lK=)lb*YR_8RfM?+<9Qa9PdNj7Y4}vi|9u-1L<58!dgbWp) z;w!OFQ6|PpJ40K6o*7KOn;r|0veVCLuf#DR1TXZbY!rFm1UqOmE-Sb?YNNkhw4QLi z90XWQ*m+)F&(}}+Jdyrru~-<&uNmYFSG}lJ-tNk-IRN4W6RuHM=<_3sV?s~zg+s)) zyh^PB9<12aI0RRITg0A_nHA$iIRc9On((cugy(Dqypa!;{d0wvn_xAud(%#gFYv)r zL^2Kds#UOVh0hkKdycS$Rqj5Nt8bmJw12p5ctP2PPd3#e5H7SChM6AOw%b-H6BdF( zg;E-xnp!I)HVV!P(@|Ew)~9t>!ui1bHlxybf8)Xwt8_Yuu&a7F=8*Bi5zeaA`HCl| z;$D~(ezdIz2OK}F0#KwO0hP@>%aII6WbK*BW?{}slSP7`k8w{m;mxv@^>sQBrp!3K zy0IF1rOldsGVzmQ6%Xr_iD9S*GXoFd;HwV+VK` zW9J}I=*@->AQ?a^&)5xg;BD5aF~%~^vh;VId|oe3@scxrMFYcX<42otnx*7k_O!K>r~or=14Hz4g=iUin#4`mJ6kKLLJ?kkCgnd@ck9VTExX)^)4{x^H%b<0S72CjC%k64F 
zLA3|iqacJv*fgwBlmE;r_&Roaz$#um6F1A0309eG=(_#717lVSJNrr*MGI|=WEe?; z^qfU9(yOFGF@XB-Z!u}%aZKZc*sLE!6aj1oWZtniQzG>q))6M}l-gg|zZnin>{#JDa~rtdQ0)LBGr73{nw zbRU5;maLNs(v>!KolV5;bOdN$nwi>K4ijLD2n2g?Nrbx)bh}w-1fcXmjIXo{VoVh# zbDJ%%ie@_CIW<=KLrODvSeUHy-TqLGoAqX*+(!YRbQ+-YrpFkK{&tnR3=eP*u6g=` z7ux+bN_w2MaQD7XkH?SQPgP0}3rupW;MoVFn|41=ojy&yRIjC_fPQ*EFrpzJRAIjH zG#gupY9L+73%_)2jW8nGpj#xZUFJ{r!qs`01KB{wmSwm92p5Ym7n+Iu_#;|4Jhf~v z^8QhJdfYGO{%KEoI3nzR7vc1{Aw@tBKlfKBl5%>~ijI_8_ zrE|`>GPj2kGt>xA7Y})o`O5|NIpA8c=>Z8A7E;dN5^50T9DneN-(KXjD7St2*u{T9 zxa7M>)hxAlx>jt6rNf?F^8{vfxI$HE<^5g(=}VcFA(RT?Erp#$T)?=@f{kTqDu`!5 z=Zg$vW*c>1?E;pAEv|MT>gb-kc1h`+_&Yo4MJQ1AF3cF zIP6s672Tu87qjb-;q4=;dU!8fcN`>L^dbfu_<92>j*Dh zT50AA3w$y7sDrK;XS^rBhf^ zSiCY6?tP+aW0MiR_lr#VsQ8DAE1C6j*KW6oa1eZz{Frh|3`sn2ok)1vVIw$|-EKk_ z4cWFrNK#z7#81JBhf+$?SEO(Et5MU#sa}mzk}Pc+dnX%2Y2R%%$4JY3yZ=cXA1aGb zN-ffbre_~DBxyu|s#y~z-VJpyJEpGE0d-cE%lpwIt!&W^UPx4oBoK(<4 z?Qmm?z??>3h*hM}Ig>W{fkLc<{7yV06FX=^I;l)eoi)+3Rs>Ll3Wqi};(=)s0={Cs z)pp;D7oKQ$VpS=O19R3T3lN#v}a$FIKJzS|@sy z@|xTzD^p>TsYC6Knc)(sz)O6~NCmIVaF)BLIpl6UNbeBi|>>{h5?beSybEDx51nUgA8x?|W=Kv~pwK{cLGPIFdcWju;J z%RxR>=&7l4hIf$VFxw!>OW@Z9EfbxV9lF= z^COkc*=APoqEw+dSPLMg>vTgVF=oS+iC{5N1XcX-IAWu|WI`Kj%}A5mkmVakCV<5JmS8#>iynzo?&{C%HXeMHe_p8Kfgo zIx6y?q*FI4RP-tzrZjyt0jW#P!dstEC$4-hjmFJ1p$*mpy zN9Cip3U5u5)Y`X5tH!pEB>A@v*SEl{IytJ`kVkdLsdGH0Mk=`d#S7yE0@iH7-~v8~ zU^Q;BS>pCd1e$miuI{l1Qq<}+>5Sg?ZGYOM%TZ${&!$e7@?{He(Ui#zvDa7PzE7z0 zktvg;Lw-?qtA(yVanmE@-XSkbe8@M2=!F0)H$lPA2_mS1GMh-hNN& z=#DX`NtU7VwoR}AdS|t5Z{52Dx)w?IDF{jFFKbM2gyESl^~6i-KHgB2wzXNCU$l!X)q}Rf7m{*`IG~l%YQi<%Y5up z6kSe1-|!tw61PoADZ-s@+d}sQOF$ccV5kf;b5T7dRKUrrR}^VuICR2LN}|@cxkeDk z%cu#SD z&7s9ODoix@7b$WX7Jf*_ATcRH9%nm7A@se)NxLR{Vu@O78a1Ne2Bx4rsC8^^a0`WC zF#Dum2HvCy!D7*Sq?*uldjkgS(Ol_->$B}2`^F)6nVVU)94rces?yQCM8DYV4=P@c zX7eJP0TC^>q0Z-b+Pob)ZSP5vVjxbB-~=Z7B`+7)t-7WcWFwtB0oCNmvF6 zlyxa+hv|e9A|`1ae$9z&c?UJ8ypEYOkpdk>9xT!baPz7I4nMLYlMu3~2RrN2JQCNG z^nm5qO0JU#DXfg5V1$pa>nIFSrG>cve_0X$X?%mkD9K@F_Na8B}+K7 
zs%?kC7ZIv1TFsP4WJR)Qj9H`R;JvP~>LS>PIYOzpc~s52?2{zd);`1A77#^x*zL43 zuTHE1HX~fvtk`@X{dU<;NPvG<4EQq8LA&iAvBYC09qb=bHn5a$3tiL9hyVR;ZULgv ztl;MDU#M&_xX8vqorYtZ43Z0o25@Vhh+<|(3SEhC;)W!Qt5~Zq>d8H)9--L>vQat^ z>j^ZX-Q|?30Ey3QtBvDFwW#(BXMKHplJQ=NUdBqULw04B*_DvED%KywOcjQbpcLb* z;N;s|X^2qtGs{%J#_GW$I7I^v?L?Xy+B@7^Tw*jT_;pesLQQrPn%C7QninV)X(IX+ zA-+XrDkVlg8Nw>I)9PC|HC1wW^VDQH6aX=VvIZ2^@Du=BG;!T1pn^j*CuQ8)ubMK2 zZ4^081eU&IU?L8tv9ndBZgo@v+K^+cR4id%k_8^wig;VelmB@;W&3JJjoF4}3g#^w zE!wpXx*pF>($ss6tp2lKNRF9RQU~_7SbT364af!p8)a)R5)rvx*aWc3po4}Dh{S{e zIVWvkyU!) zl^;un{7f-D8%!DXBsWd%tTIt=e4{F4sOmy7*AmWnJzqb?i(@R*?9zrDQYuUe9>u(+ zQgdE&d9y7ElW4UUr7#O1#p_kLp$_(3RmdYVN7DztGAAva09L6ttboyGpA{}X7%nL5 zZNczP6-~+a-pP`N7F4+IjAnb+^uTPP?aO9t9IBidR+2?z6k@QfN>11fsYK8!TPFQU z`XM56*89XvoK`7y$ZEFj7LjqlewkTJC))Gbv$%*bwOxW{f&-}5F+1Rd`mwqSfjJlD zvjNRAZLBNc6I(QTnWi#`%5SDZx4F$8ji)?fX9PaQDw&C`6#IKJS(`G1MEb{TlZQ4N zr~VNoX^NUUsWfsp+Y|GqQd;f|aX^`3`f?YOih@MR4iW8|8ff>awLz$4iC@ZXw=1bq zW@nT`&begTm`7~8t^G{LLt=E}UJeq^3j>juPLb8$p3_RMLaIl3VRnIqVEHUoiD@)WKt`dxW=nY>?my6XXxr zqjotskXhG7)0~ja!ABgTsM*Llxb?4M>Nf)H)P(a;OUOHYH%PO6=|U|`A#c+UJ3~aY;E3);GEHtpaz|*GIA&4#-J}Ih z#8XkBk%eXK`9E04R&jpT)X!FUrme~7quP6+Xl2^_YnM`#QD~l%Ku<@3Rk(`$>oA&6 zRQQ!i7zFw=VCa%cFDl3+ttjC|9i7d2Y^RM0GnIw4H!|k6nUnogLxPhM6(fO6kd?lC zn>jFNiMl9+-~-tFy!^>q^T0SnLShA(nJU|N7OI4bf{T+7YyXF`R_>6!A<7d(Ape<{ z%`Z3U#VU)w&0@o!0&!Nq_*T?+QV9ybp3U@%1rNv?6A~em#aKd({C15^Aw^DXCTdUo zX^^}a0pck6tsAy?WG)mZwbj`bGM+xaYq-Qk8XK^zk!sSvI@LWYo**hnHqDP9m!a{q zqRB+Ird5t&DKy;l(<44TF)I#f(QwAr;4qW>bIIwR)prAxo1 z8HlKIpAOsDYYI%B{i41mb3&_c=CRGj3)Q+0`U%pc)yax(-XE$#?QlXQ^$P*%_b)=< z*DH6Nv!%#1wP=;2!3TnF7CW&{a(G{r4>1xC?t5MOu|r~}o!i?9$w`mQMut`~_9Z;1 z2v1a&V={5UzIhfHL2hl6A`DS`DD&C*H1ix7g0b(iUc#h^t96}qI_|S%R(aAHj+kW^ zWZH2)FcZVN-m~<;M<&Eo(#oI-q{tOQy{a-&rMXt;qI~;9g?=M;`3gM(nm)iQM0tb) z2!_S086V1X7Y0CeLZb&^ZH2hT$_xpfAEFOtyRrVG`R+&`dzE_UiKI}k=~M&}GZJUu zwmAb=6O6h2^fV$L>-*=UUz;>a4>MB6nz)BiU2@hyQVfxSR1!#{ShpC6Kmy3SqR{q! 
z?P#_jMGSQiIm)@|!12mzdy7ko3j4iBi#EE}CgKO~?fiSGxj}gU=N8JRjDyOo(mqbs z1|Hl&5t}QDXraQ;0$CC!+^X4f-%U--5}>Pu#8C0JMG@O2J`&OWOp7vZ&UH}|&4%$V zq1Rq?+kl|QOEr%{mJ*|_8IZVbPuEttA}O^xP!+C%e6i`hU7^(4ppf0h9ovMAMw8)P z1B!9k2hVow0?f7%l`I=QSLMKoxLH-^Y7eNG6Z+o4bjeCEH)HIquos0O^V^l_>x|MU ziQ|0d>`DNuSYuhtRd@k4M)FiZDbIvT${W0}d;xo2gythx#SQEw!Ubp-J2g*e?kOm1 zvqHkSDCS!t+j8?Qv&$)Vmt`p`s4(MMdgsx=sWeN_f`WlN4v%nDI8I%Nb14hpx86kx81y60nHG_zHS zJUb1-nzy2h{_k&b=xDq+Zr3KV@wVGbB=Ef`o8t)~WW}jMlMD$v!j@IB;wmGiFN-HS z>)gkT%M*LQZ&tR*uj@w<#FJcaKfYDw60$5SYrEL8a9u1hF}Tt82@eUU5!yFfYJ=qi zi#nWm@SatEP9&@3Ea$EXC13v#aaO6F2zs(}lhhm_G4D0oG%ja^@_v})T1-l;P&Cgg zDpLIWTb85leCvEFo}}A_M2n{ggK&jaNThK$=7Gh?>f0R9Ugpd$vf6IV0q4?-;Y(v^ zgfLOq?o2kI*KSWtiMD;qH4XArM>2F~Rr4Y-nph{BWr#GVh|Gsxva~hAc!X(kd8*Xh zO_V5YAa~n~4JJeU=PZ2VU{AlRWCi3HzVi#9!{X?#*RM)M#ADU|sJ2s#cMMJwq9RHkitDdJN$>UbwsIL)fCzhf&j z5LW~*g#s71aK(TF%teMnXK4*vYk-lpwt9O`DBl+7dKkD1EaK_J8)KXC*on*o@+pNy zUuF%O3WJ0rwI&RqLuWiQ!K@-o{>|>idh}pEaXLDIvdZGBO+}*zY9}KWNletK+Q!mC-~=!` zLkHY7!lt$7iXLoVVzWRKPP@!$B@L?a73CR27n7n0La#V+ACN=pe4qynsEFTjBQc0; z4YqoVD4E;Qt&FJ>)Uk>e-kI(A+wY3goC&(PKU)Zw3bRd7deDYkKE>8aaD*rfwNNCp zndQ=NW&C8V-fyc~z1@ODScs0qs3pSUm|SHSWilYKk2>V)islIcX}{8|Vt>G7 zs7pIwsbRB+zb;RI{`py_kOEaatF&6C!8P+oX)z^31F2(=H0{WU=_M(zH8x7|6e5>V zAPG~9Yh$7#%GegoC@&HWMCLSS<*0S5BG4M|D(E>Q7mtsa7(455HaGxP+UGPP6j%z> z8179mZ%!-~Tzj*#l)3m_NepO|x#(I8^Enr-{B~Xin!+xuWKbIOSxDr!<{>aTy*_F? 
z8H4_cLOj=QB@ksXD|T(896npdl```TSFjJOCUn>w3o~zu_7IuOvZ&jqN5}*bH4yVs zc#^*1l)xfLq)ddOrVMQEeC<+k+F`#YqdW_Bld&H?s0zU5dghj35pEL{Xq=8hf2h;1 zGG2o=Ps$q&V(OxGg^CN6$T}NWg$h6^gkCAvN%$H?KNJ25`yGmC1rBTsY&)4sj{F%j7+3^8r-#ZW`xE5Jf6*SR}!44>du?&I&K`(@5 zv7yS%HbJnou2ER+VNoJSBGT_D#zySa-UH&wiWouhs)>U3qDxjjBcRB6y_6R@Cn~otfw8m%c*MkmFK?B^rGs4$yeXa-dLfnKPELL6iX^gj~g=>%Dh+dOT9(UU_{o zIB_C-;p-2+$b6W7Hzh7T9zuFA{E|M!pr5o^Oax@C!d{rTFn zI+Il>bpY*dW2>`>1f!dyA=Ll$T=nW@Me ziKbz9^0J?loo}Dyu)4|!kd`L-km_d-vi`1;G0=OdWV63YqH4JtNK21bfitly-iD1b zSb(}HW*<^GrDt@m-g+{dR!d5$*Q~q-CVBFv$5B;DCd*n14C)|N$ys^3k2=ZP6BTjq z@Hd+ymD0fp@@%Ui+U;~A7g&9A;m})m`l45TWTch~t0?Vd-N;7Y3weQIq#n_yl#0Yg z4w7?~99?`~oAp9R1=O=0kxK5|vmRdc+*olYcq;L3WuLX(IV~-HWfWfT9)+jRwSC`y z|MUAV(b(KAicMd!rLK3|lch&B_T+c(g9J11v5|9dn=G48?j^BR=;7{VHhY4Ej!y4q zIerT>!H3&5aS5z%QgO~`k?eykvFpJr`TN_{PQ$aE*deZRrV4^!v>9ux1Zg}F*|*O2 zykL_|)bmNf87WRgHgblwiMopW-NNtmxS(YuR(NwmgY-B*e0KD@rHHKzydi~Ns|AGA zZRn9rd5fo98?BMWX>xaddwMt@xi)sq<18iX*a{az)r@M(OOy1YISM?i z)=h(zZ6NvC@Aaj)br2gydl~0MKc-T#OoFYwzZSC%i7`|-b`kprF+lo@F{e>c1v}J0uNc6C6p>Gx)p{*KqN^oCZF+AJ&4QJIc4@B^Mn@2xoBSzSVCBXax`Z7 zzo$4Gx$ZL{CJt6s{iX&0js0?=foQ0MI`FyZtbtd*8-2}}{MSjGez?rEeW5Brr>-jL zJJpCd*Gd(_R`%N>%Jshow*ruk=nT|ZY?xX1ISRwDf2CBo?Z-%ml=f7M(0>n{XA0sP z9!f0k&atS6`4O@jmVhiao)?kZqVJ_{!{2MJl z36nI$jlM_^znmF0U-5ScRbVSD?IJrfY)6fQEB%fNsN3L-Kp`)cPK+lw!fdpZIM|*S zrf)UDsjw(9QN>-~7Qryv;hIHU)$mw`2J1h-Nr*hH$SJ!Kb#1(kP#*RgR+O3auoC0R z!0U%$hqU+B;hbDxuVfVplcIWx_q+X=Wsk7J>#P=BQ+$6E$o6nQ1!iL)ljJC0NZ^bS z0VXmOy4FdhgM!zM7`0giz9k4Csks*Eh<`{WH3wmoUx&PKn^g08LBcGz?(p(QSJ`a|nn!Ees1voriGoc;XrVGS zrlMVT_Gp#(hqRstS+nUXk`VD?z2ZN`bDdDqiN^a%=VMb9*9E zkP_A=O5PCB5~ z<>-^Mw6axUweTYPsZ8+A5s}ekt|SUd+18)E(|e$WwipCKBD+1V ziEJoliP{;}MY^?YChhc!DW;0MGVi#m2Gj$oRm6Tlwx4aY{j8=yvYcwDV?rb#3dLT8 zM93x5w8J3q81RTpMFlWrzCs$9N@x*9`F*Olj05zpy%a*S&}ig{u!tSBF0oEo1xDDk5aUjCX@ z7nz_GTC|aDq9TgzGN8A-?U%dT%Tos;0SFW*b6Da4bCy%49y|3K6u-I;NO2Qa4Yli2 zQZhDp6V7zVH1eXmG9=W!=xI=EP~u?ssbR87q*&Y3fJmUYq;=Y`E<5lmLWA~2{DA!u 
zTjbWPgKA5n_=|Es$dGp`UN@9`2u*RIAf2GC55$a0g5gUkoN79oJ(TUAYexBic2#JC z9$6#K>{Z-j=@gK(_#%xPV9B}!le(x_UG8fsNsuj5(Y(0VYb4FbHnX;`-^x(pHO#*k z8Hrg3bK_hTGb6+p zP%$dPhas0!yp5c&tFrkS8x1fQ7G9m|KbCyP_X&;-GeSQ5#kR4gl)7`5(S)NxaiHyz z5l4fKj)-|_Ou%BJ1HCiRO&lg*(gGA~*at~qrV3Wp7rF}eP2hgJA!yFGVm+0QpEn%x#z+9CHps5s3KFE1jN4= zm5e5PSZoQ*@ITmQNZP${F+NyI7$*=xlgF5+yNQ+yDyIr?4 z^tnp+F9cg$Otubn)=|Fq_}LQ3MeHT>hJn4twXU&rda1&7E;RJ)v#vp4a>i}t2HQ;m zYRzq-DD|%01r*J`a!E4$ix_^yZK~V{B;zjg(Nam0jeTJ2>51W*d==Ki;fvg ztu!e1RlX*v!IoD!morVIvbid!zz7}v#q6lrlP?0FWZ>3WYb{yq#6`0V7{#T&tx+58 z>(8cAGq2qrf;*E2bKbnlbme}!yCTjDz032L##ZI)4?hgx>ENxrM|}ue++Op&BVY5! ztC)H4)}6env(3x1EALyM1Ah6=Y^yXOaNjq_T9SYDP+nOEfk=C9;guyyTXl9`R_!LHQ4{#;)#lKh1(rwn+Kh*ypd zRwp~vycXoiU(`~}yE(0Sv6XWELWMujK`vQoPW^`3N2!}HAG$*wU<}12J*uYM};+tMWsVa`&y(>9KH|$myi5t0AC~}+l-MG%Q`!cDUpYg=hA{1oek)jWXRFVW zrghTU+?B|Ks%;-VVE%$nIBJLYYfN~2fXl{9@L={fA~81MF20DSEVW3tkct^0$d5}b zSzaSC$zK>f4_X)tIZulXf>ffQPY`l+aL&w+3vMF&(KZBox1aciHP zFSB#Z3w_uu_)qj>tRQ|CJ=0{BgQ7ezSm)C1dWW^`TEZO>^0(?6N_kVAiB8OT)UAo1 zsVgj?lTV!1q&V{lammY)rn2o^VW#mU29|SfU0&wKX`XpzYy?8jq=pf7>hGQ`Hl?^! 
zhy$p^OSBO^QqHUkMrc+Tmc8mg^-6cl>+F;B7ZkLE0)D~<0#%Ya%XJ1;q}t~bD);It za@2nE+1FPkL^sd45_1YE4%e2^yloM4=bdzEcN~G7t%uQD8UAol>4d=wRukcxY;ttaW0iXQ1E`rw(6;pac}w9oVlLv6uV; zbo0Ym0Oa&@%M{7WWMBCUIgfq98$oyp^a(eP$gN{nf{Um&FVqI+FBsfS%CKI<^0;b1 zfL%7_)uwOv!u1X=KE&297sc_+Y(A^VKyYSl8X?xqAT)b;5IS4gqKpc(2UZO>n_Nj( zTnb4ERdE zCVD;kN>pwHYe80R$cvoHR#-L2YUg(OB8}&cWr#T1*eP{HQ0U0MJhD(O1(!U7htlSC z(Fl2+JAHoKMj0*fvt*y96C@uv7%AtK`PO-1hCzPZh%&(j9!K38u)keyPu3y`4SJ!H z+9Y)1P^n_mqUcuSc0uMXIfUtbV@ukW`%{J2qRHV=s3sHvi(u%P*hK(6K*GNuGF{Qh zwD*x>Bn0aP`)4Q0xl(5c9ok^SSxGS$!Ezl6(?tIfDg?WP!Uj@FEPsW}cBDA4JM2%z zTqXC2=Cg|cBbOWz8wvPaqk)2{_JDe}V zak>zLMtY8T*MZswCXhC>a9n3k)FR&Ib|c27NIY`K8f>g` zvywA4MD(s^Zb-)Lbiwi=V=t#9@VqR4Q~LvM32gu8aT;%B89SU51W;j_YAeYrX77HE z9cEOMZD_c-69bcEuk6rQc`uyh5z?@nXpynkQM|&y9?8)roD>H6{Y;B$;H&9b&P8Vu zq$H>IuV{Zr$p5-XLNgkxy;DvU`pKHor&U%NknH8n zCet#Lm{!F{>0k{CS#j>Y#bQ&hn@y-a!q~03M7FuRg_iU7B#%LUYIYDetE`5Y2F14J z#+C^J&+hmv8TQbl)dU@N?Td#&U0z9p${55n2EFx##JSTYyV{=F3}HpxdZC`ZrYYB? zB1pm6XCffAf7rxu%?N83&e^*4ifnJ*%@f2tJTW-;vKfj&7zx}*E z{rN3^f$+heu5C67`{nJ`&Q2Qhu;Dp)`~g+;$P?(`=|>4$dBX2{6k^)g)3r0pta#Ok z8Q_3#{#6R05B=Tmcr{8**5SL6e#bq-!$6gX^rI?E(IcONd(HxpnRv(mIDGy=zS3wD zAH$+SWUv0fqG?R}Zls-IqUV?b;3qyLzwTIk$3Y`P1AD^@=F5ud2@|h%8;HU^hs+~^hx&neV1uXnf!_`O>0-^mqFpD~2lmrn%&Iq@VxO_n zW8EHZ#W<2f7cBS>yb(U41H(Un$e54IyRUQ6k3Xy6H{x<#FpeSeaF7sF=o<$feaE$z z{lg1|W0bqh;WX-=qt~?erBIAkTtt4nh#u*940uQFsC;H%D|27~KR67g;(AMJ{O#La z-hdtIf$7D>A6B^B@_Jw@cI^(fd?IM->iREAsQ4iJz2`0nJ+*+juWv z81jGs*fB8kWii=Z2&V{?Dk0%zFAO0<#ZQ;vI249Fn0QWFs@lV>yJ)G$hk{ZOWscD( zAxEX{nLs1k%ns1LX$(ejedT9%96oqlZA$^yz&~BkU}0glmp0-Gj}ev#2Q1!#FA@`$ zjA^vO!4%sEq)gm$n-(flaS-^EYQTM~6o{0_F&`5`o3zM7qK zoEbx)vTNW7s}KtDyTq1%%-iCOrSi8U9DHUPyiO4Pa`Nb`q~eLQFNFJWP7&{2L62yS zm-k8@468I90u}M0ulyqDG+J^bAs-0kjNUQO6;>#``V%1+BM;6sQep@U4}Dp(c}L}{ zIQ0CA(#tXNmO&=S@;gw8I`HgRjW8YVCsEoI2M9#*yg6k@^vQJ|I#=9NyNabbN-emZ zm8|c`Q5?N<9q>P49bM_-QLl!UmSVVmTR5rmoh72B>5+V37N0rhE$PUG?<@Pry#4%w 
z8#aq-9vqr|(auJP(W8=>R6Igb?M}u8@t2+TkAJ!8MFxNNS;sJ@Iqb1>xhh(9HkWB}~Sibt)@x0m=Dp6*yPIkWa_F?d=GWEFLE?KLYQ1+V6_i)1ol$JWmQsWk&icR%5^H7E^b#* zg%Y+B?`6wH-)^UZb5~VBGIM@I!lpoe;xg78Idth;gC9X@DCh0i_lFf9iwVa^u9J9eU1EH5{O4^_>dIm0X`=WEHG!{Ki_T1S6X3st#_Z)^<9ljXVRvxkD zR}R^>Wo@^zY|arE9Tw{aR~(w+I1^lvM|qjKPrN7vbQn^h;Gv>z7Y96B+}sfZK{lUh zjziD?`?`?N;XMqd98khj#2B2SA7+QL!Vx_WW|R>j3#L&Z0U!n6|}qN^LE z;#1O|S@w?0V0%gCocQ_>c?kH3f0axiP71Wi{KElGRT&*Q@TOH5h7{Vh3YQ|xCOt4n zJdR5{UX`|hr8xj(_AnQdzLjNa!f)N~hs$sPm_)fW8*ZJ%5lm5j8FJhl$dV?l!{OQX zF+(4(6Sp1eJbT)=5nf<{eGr>QPnm;E@1AKtY}+L8fY9-Ye* z3?Go{1PdJgvBWVH`#Ffl&JMgcH9w$(n}Z0#+56;Foh0PeE#cQMX005p6<1&29i^}* z!>`@uVg`Kv+BIar16-@Nres4MNYz5c^{EXiQC;Fe!0ae|ylSs>jO=TlKYDo_Bnowh z%fqJ7;4;Z}hb)Z`SJN`E@F>Kh@Sa9>WQil4qRy*S6RH_TcWJl(`q$q-|M&`5?+&cc ze)d5Ve+Tj|p3O_8gO?(VhDJkgMax(_`Qcc^YCoG#4((?{x~qCH<~|&JNu>!WXn{XT zZPPt$oP(#^g%^4&RV?pZ;Kw9WeYN4Kp45IemD!3NSzuuc3@U##to08LC`r|9@ch#_dSS(bbc(DUU&3lUod&HR6kHq&S&NtcB4g7keYl9) zLytbPIO^z7!Te8s^PqvfE|Tp-yuyoEl*j{Z&*PSj{PgD})QT;3`oo#h@?hf&!;X^S zB7}1mSpgv--a)A;jw^K1YU)BOaQjT*dy1+F2kx#u!qwIs2qI-w=vySR!4C zqS%U~z(Nj@3iNWJCP})C6QMhI2`V)tfydAKP-G(CqasmBO>?7Q4U@0QeudFE^zkAt z&=%P>#0Y@$Y*9k_Q0Sp3SYj0EWY%$d2@-DJQR}H_-HAd$OmuWm#F<1Q*1BOYQ@P_I zt3qEu%;ru#8`E-ieYgfPJoNQo*BU1g9zZKXr&C}V1gXk$1i>U}5|f1LB8}53GApc@ zB|8pYJji1BU;n!iU{$9vSoXF?icO!%5U3_zP6s`_z}Nf4BS9hTuN}fB%=b%I zfy}{IUu9wli_tYocGuHJrVo}K6p_{G9)~I6bv>nGDH|mc+H7k|m}6vR-S*k346tZW z+IL#+C@Z2ur*$@}5o4k^niw6$O7}YpX3?Kh5qtwJFomc7pu;+q3^MSL!Ehp*0iLhO zq(=8o$U5|3c?<9>V>Xm&ZWhpsu%m=ZKfM6O;zk(h&aM>1VOqsnpR&$|>HMFY zR-zVr|IK2E+hM08Q#|SKw(qkBxtluexa3gaRcwysF`ar_-%Z3FcX*&E=^e$WsAI;# z)1h_40bD9dw+1IKNTzwjm9>%BSy{)!+_~f%L8&>q<0dj>UBwdXCcP=8w`+!DKN5MO zUp3HU>PirKrAusWm@rbognqk)Y6F!ITS29=npja!-M0>)p0bFhb(Z{9))8Z$>!U^o z#K0XXkA%PKz|;pt(;O715@ulovU^zEh+g5yen|doWcZ|)AT$Dy5CQY!mT!b<(e;LB zj-E$66C*wxBUCDFvqEG;6t0!N5#<;Rk$RS`r{9PsXhGGUs30$QxjpF-!%Xbq zlCgz;xzxAMBOpvY`oLqMgI98q9aRR)?^k;LuEGTJmh)D>{(Obblv5sLdDkb^JoE{l zEgaMuTPIJ_vn3qOEcN<+lGXBI_pNuP5-^cvcz{sXjLwr+NpX&YJpEFXMwA{W;+1st 
zAvJHc6t?5VwbNT=zgwt)z-RoKpQP+B545-yi7kPbEk#$i%Hq&3QGQ9u-oKrK#LbF^ z=dY`hkIE5C&4FMYO){RaUKIy+oUk{wX|-T+T!g7XPHZ(tdyddbOr3p1aT73Lq%Oi2 z&g#@7H_Syc>k0*+gkB3gT6y?Kgp^C)-NcYBO(=n*166=THabw_@G#I#ZsQ?yz?Q)N z(P0F*_H6DBIhTskWoBAM+UsLRh**3*3F+Vx5X71mA{^t1N-qs~f`~aaYLG7actVJ( zQf$P{m8Y=x9c>(OvO8DKIc8(<_yBf4N6)qhBn)@o280T;C~(fEFkAGy>pyQMStrLn z7)v63J=Ec#v3nM@m(&i~QHKK>yI38FDRGfURX?$3|05Nu1Qp&pN829#b~_*#pug(* zdW4fxQJJ8_${bwL(_33cv_L@s<5$D?p+g!wWdzvkFK*6^i5{{7(I)#+MV1tVV07h{ zdKmJ3JOKY(pTH)0CW*7L22~0Z`U%0dzv6d$@zvX=i4(R8;;PKOH`b>rkJm%PvjZf< zMx#PQjhTQ*I@*i&u{tT_XBsW!zz!#+0wD|gBJ46IyK;qTOGZG07@!ma2E0gG?`$bg z){WU-Fpc&!7S4dCyx01heJ)kTsQmj|`}tektn{o^(odj<|CwZeTH!tyx~T|eKGT% zIhEx(Rq}~}eS)nN`$P-G#zZodvkHz}g{Z2sP!9#JJ+p*@({*dKIT`m@b$Dou_fmy6 zBDNSKoU;i)e^O1QAS^K|gG&1YPoht`sK~x+p`20_mkT)g_~6E3t5xjMQcqPYBD=DeR1n-Crd)F$_5475L{w zLcD77(;GTg1t!wb&$Xud=G+2P$OR7Bgle#03i9VY!N?6N`vl#wLA*?)tdr(zySw)H z@vYqcg-=TywA)qoL}s%vEu=)h`lG^@^p*rypwZk zRqzw4>lZQA&pM8AQW)E4);l6~Y{Lj?|G{cYduBytK-x#Pj{(QXilkW}`9e`f5;qI9 z(YYGQFZy|;UgI`e!v3;l5GWE;S3h-YPYlL>-4q6S13NLN`h;ATOmUnW@pseTfv zX`)~jW!3{Lh(8p@O|&sZ;+(VH3BpJNaqI0*81r!g^#Ki3ct=o69#caz8UzI771<%J z%|;uLl2A&iJ0lsG;_p$Yg5SsnL+k~SZM%UC%yv+YQ8FYpn=hIyIuQWVPc^fxV%vFj ziY3~7{R%0(!mwl@{N0{7R`#0htB?&jR%#sWXV=I}>apm~Qh^e1RwdJ9ph5Pl)UZ{? z#a5c>$$U$$CUJ(gqnDy;>cE=OH#Ia!P%bEvL2{>VZzAG+&=Xs{ZwxG$M{6&c0nH{D zj7=#&jw)ja#DM*M%iFAE6E-fM?e_5EJ0RpZHz4kV3y6V>-bOo#pFt} zLGM*ia;S-mP^}I`>{F?bv<4PJr2>htDo{F5JZq5)-uWQ$YhcG5yNDtU*oLu4pcW#` z;!`GwUfV<10bxnIh%Yd*=v5Sc)J^2o%rnRX8iomKPNJB&Ex{|;u4x&_x2Kobv%%U6 z%rp>jkZde{+c-*S0J|EY`GJNal39;X#H+00ZwFO{IFxN6)K3+8#B9}r`kK_re+?bf z%xdhX*)W&aj^oOR@O@G50~yb3orlQ5E-XJs5KZTVn4*{|bsDQCEWPl(V9z?{sfJ!* z9Vk+qCi8G!#VERHU-Cf@Dd8B_**dXhVv%Vof31+rU5lu9B2@@v9hp3Q<}(^QasL`>Sn zD)!qU0}Y`T2@f`WPE3Q)0`KBm<^XxwDr70b_CUDj&W0_q z>??%OvOS#b6Lr3;q{vsr2}pQaEv^LCQU4~x>jq>u+a&ciXbn}C>KY~FRYr^l)eCk@IAbWn)F zzHJ6;_iQ{qP6Sz+A273;K{_0;Scd+pHRh3;|JZFAqgggBo(mB168B7@UV%zbnAp!A z!odS|Pj#J`>qQxlga)-9b{;FLww2R-D=b1#K03pi(%z4)U@Evh!iQCkA`oY$fmG(! 
z5I9m56qM|U5K<(aD=Jrl@%e3^lnf(_?1WlKD11}znY5*^SyjN0gYojFAb#k(ucASY z&g#GaQa8+-#P}O|^s^xHXY%BtJXg}!AN(RUQi;50>9L#>`G!{|4oG)|tLq=GPQBVE z_>Qla^75vL4D;*HmNCW!yb$Smeup!;e0G-x_B~bbaBVe*7Rj|Ml)oJIVRBEZ+=qnT z$I~-1G&uOx;?Wgsnr9uV242WTD#lVD1^n!-dJ+d~uM&DYumk z_RJ2yi(dM0GA7@NG~vpmU{E^n0G@mpfL6;1tdb|&y(An7N%V%4d!~?M{NcRR+LZZnCz@lU8R?&TM16y%JX(f*g-8Rpw%Kwlfxh@uQH z`-ro!p%*TxK^fg^YMgaw-zzJLzsk#Z#og(Rl;QpL+wXsV|E1o`$gEm62ygLq4YsrdthRNn)}~SdZFB+Ma)NTMQvf2gxWm` zjsrj-z)@{zH}?eoxk^8*d}@c*DhKtO1bheDoI3FhEkQZr-x7Yv6||E_RsKilq*E4q zp8V>K)UAYOZ7>wl{E!IkP}OE9uS3$1(kT{N$SQU*`-;WpH=$;&zlsWZ9+`I^-SFa& zLRavs5;-<_0V~<~+|&vkH4d-eRf>At+ch@66vr#UIFE8-Q$J<)!v%t$I$7p>yJibO zJ9#N_9H=x~J`LRa*ksZi(51PddPxz8COHD+z!8m(O&Tm6NvmEhv(El%lCRqdRwrtZ znFlPs1yYE}CjvYrSMgOiWCct-F@qive)mU|kx&H!GrzuU52z0h$(4x=VUxaIx7xK$ zNNN#q51mDe;ZZESJhAcb5y_}=+mc4c5?h1Y0rh2Tk^6WBpT_QuTe$*ur8IPno8tu~ zgQ;MY6i8)qmM8(G$?ffqQfeZ=M-99SE0A|r+9d$^RSWHp!(I=D0%qwD+0J~DaL+7k zb|l^iDJ`YV!>wPKk+J&(bE{xryK)Is}ABb5H_cM)3D>p;Zm1BM?1jQd|>7lOU_T1}{tLuR*vy}StlY9q4= z@F*eNTc^I<5l)~U{<{vHORyiY)Zb|EES07Ob=vY2VPs*^-UI{~U*xGhYN0To9k~G~ z<>FVfne)?1Qs0y0AP)xg*gTNN${i)jON{dUw2J$A(UPm%($4H{3vy-~5LUB}KDPY! zwn^EOk&@&c<`Dyy+#&oW2&)5ufk`nO5h>FKjZ1{5wm76p9OPQx^$fOGR%7HTAt&rH zP)eCJ%kKjt(gLSXH5Jw?SaUvdpZyc%mdEs6Au%sV74RlnCxq4gD54_H>C}5-@~c$? 
z7ww~1kzl(hOSRlHEalhmh0fC3C9%>~n?$9;?lKb_g@c^GzonvS8YzwwPAK{m|S!*JB2|NE!% z7KdoA00uG>K|V>IMnG5a4rvJaSz{B#7xnN$22a07MNox%QlaOA-IDgB!=SyYuu&)w zgf8l5Gi*RGQ}1vdJv;-&0H?5$TfFd!xOIO-b-FSH^MHGa^utkAm+-o=-45Z7e3neL z#^w|jvnf+VUZpEQf~2|*D3MI;m3gr(uF=11i7QaA97RLlU|Yu2SgJDm$gd z*S&@hmWKW0IwLJ~#2d+|B;LeHg7CU&%}Rx}t8hAHl2}z0Q^M$)3HDX0Hn6juI9E$W z#ng}T{9T+Sw~M&dUQs(~cOhicZ%Rs$=EM3j$-G3U>rUGfY?U$tF~ImKIDv6M)3s+| z1{AmFXSRh@HyoKW%}If3aC3?u9U)KhSvhOO4#y#%H&M4QIZ18uQ`koYq`A8-ZkzwX z>{LxI);K}yuUp+}8|Wh&#OFVw$gRnz7?|lr6>AT@l;^pLjuP>@Of87LQ||@);j_azqEhzN2@^|6eshkorGU~V-eALmF ziHeFQipOf}&E`O~;0?w6wL~lY?u%(m;WvgBG>=x^>mxC1Y6TB86!4`CByO*p%4Wpi zqCgR{eLXZ>4=NWLv{0@^l@aWadR`MidX)lXmW(H$V5`m(LSUs2;sozd6W*VX#|ac z)%J1}l{&1y&6Ykff}^0C1Z2UN#li;5`iEd3PBd{|5dUownS^Oca$;a-T>zyBBd_C$vh6Vk9tKhyf0qMlXL)NzvVB7Nqz_{2P}%m_*Uf{luTo3|e>OmErpQ`}#uvt3pHet?!jpUF6T zWled#*_pncumAd#dW6Wi*C-@Um7(Qp+= zF{nxiKFs#uoN^Et0=wes_oSBWP(_=WoU@{Rb^R_V2`VG8_qB>ylEW) z%0_$n83Ybh6a#AqMeYJ=f*Qx<&b`<3ysD<)N*Dycsp&OySCi|HfDyq z%3e)}0^Yjr87ZH373zC#(l(~hCx2c8Az#*aKcrPz$Y&W95tx`-wb}tX#|xL6%+6QQ zj})YvF|#gFir2NhuCislN~iH#2j7{2v^Gtsg~HP+sH9?1j$MJbN?8T=oseQ>SG+Rp zo-Afwo+WWK$bnVWW@5#rn0gIJ#!*-%@|8Wyscx>F?w+)W8@JzRA|*PrQ&v=h=%9Uy z0`);f;8lgA#@jl9_pnTLW#Q`2a#A-V)P37;C7i}+oRDa%O*fNm$hoj7iHUl#*r#}8 z2;{Tv2w73K-1b7$IE^mzt>al_nEvkEW5bZ8*4QC(-`1o$ya-8KWpCwd%J87*@mz%@ zr7{#xtiZW#_yHZ$ZP02au6YqWlsNKa(5#{@Oz3HD9#?}wKUZyqUC=swhi1znWOmy= z(}wa0T>UdEFBbN`U*F{26Du-F`l%w}a!vz=Vk##E#7MqMn~;HKGCRdmIN%JHJ}>IG z+PP1S0gBP`N`ZusXZ82Dwo&7&`S`XmXqfa!d!*Htq1Bj)MKS~bvof~rNXBlDzzXp= z--rMz{fEvS%9@n%*Cdh}OGK#bnt+k3vcaHu?|&+{?O*uh=^4@G{!pDgo7Yw4$qC~f zo^&-kYGwgIGS48Qjq{I&4H{YD=dDIrWEl5O*tRp)HX%>0kcPWR1+vd~Mb;_F3>7BA zTev0W7MT+}mZtcmSe2nccUa*@LVvZ=VNQrC+huH_rKSkRL?bXUL~l?!YU45~!xBxX z3{kq;?$9R!H4rFUO)fvW;ooIscI(#?yl(6Tg2E+LI6Ao^UxiJ`?P6zTgE;e$f7>QQ z6=!uuIHue$9V(S=t6&HUevoDCT4zLPv$o88ZXfO}3LC!EHPLvND!DQPAFHbL8wHZe z*O*FQyCR6h*ap)uy*<5su4peE0*!`752K9F+n0>C7&Qlm3v5WEXebM*b$*BecPghs zjY-8^z83gw5tk4PZ1|Kx8X}hO9FA2F-)>Ah+p58!!y>||V`9FcqLYlh!<$|Jb_v?O 
zi+93ETd)5JPGX08OEox;n<;z7f@m>YYy*zibO5VG)pD4^$({otAZG0;&Ktx3iMp_T&)a&y(D$^v6S%`td$2VW5Jyq zI4ap54pm4O5*a!PabtdF8#W=^a3jIWU??;Z&eBba7V#qQu+2sxSTnIGj!apf_Vt)Z zo%6$f-^_Gl%UM#?_j2k3x+<&+XxzQQ1kL2ue~{toopl#2cwJ3|H))j*8-|Os+)s8|ulZX#HUeQaUs!eh>Px ztJazm%+`?CT4Cl|uZv#zRp}V|DI;#})?TX++)hsK4VPBxzk(u8_x^n$qG2E3$m#p4 zuLh`tRzPEJ5%zqd%MLKv9Lq3m-U8zPv4&O?Me9)`*OwrH?gWj6Id=jkGI;)}G* zMfo%r8NyUrZjhC!ZpH}Ti5dsP45KAF&5O>=E69%b!)j6BQ)Gy^lRlG3e2GlWA(l4_ zMUSFWt?YcVZ^sF-K(T2MK>sV+jYm7RHkndb0LBf*l_{gUi1ws(t4GFE7ez{solb?{ zS&iEq%H783|L^(RdimsXXPrmaz9Wf51aGo4%UA~;uoo%m9nggLNkIagQOT6+$T-c2 zOkwx2+-I*F+Pa-Zq$M*lk~wrK(|u{v^qG&XI7^tw{x=2tqmuZgs$Q>PWtbSWwA`UX zib9MM(pmlgdONoy$FXF){%bBt!+8UP(MJ@c6iu^YD#gmWDyz|pKt$X9317kN#iI#; zG~hhkx9`WhBU=1s?mMznO^*)I%f)U>h^5i4v#>Lr5TzjvSkXuKEd)Ll2A6sPf?Zm& zv|owD$g1T2QD|qD`-e=DYs7y(w)K8kkC#8EQDq{<)otm;?aU{{q2+mge|cg! zd!FcYp65AO=Kv~=|8-N3epc3B4p7)9n7@ZdZ1)BV(X+uF#Qg2^V1J?UpFjFJvFxAz z=}L^X&1{*c=vwlu>NSTsQ87xoZRT$uA5q8o<_OCKq4UG$GSjMa6DiHpEC}*ExqWoqJVQzuy^!nWeEVFTvzi0&3qtT>`lsiZSmud7r+KOsWLP*n zAtX3QR*lafs0LRh%=lF!@+4hEa8yw-m5Y1$8%|uz#yqae82@_MxIa2h!b(j&`B|m| zVTVIbT6L(q7{_O28LG>nK|?nP*pp~1Iht|7L0GD47@niz&$GBDd7c4ro)?{!rv>gt zfCkK>RBQG$sZySo-IfEr^Ak*T@+66N)Q3d8K=loul=Br94FJw7Fzq!qG{`4)%OGy~ zusSKcz`|R7n6Bvp^+IaZtdQNU5njlUhZ78&ld5x`q+rXW0)^|~aY=a&^bgR3j3i5) zCs4ogG`dEfdpup<#luW+?#es#eBqVQe1)K3j%EE7Ty4hgp|z%{KW9iXpunR^(fUqfV-J9(yi^yiC^W>1Tq zJpub@F_Y|eN~<<+i?B=b%BzmyiHyU?&F({x5r9B zwx&EJI|j^w9k_v+*U1t1o*_la^PHT*r9x!MDsl8l)9mNiiRHjx{239$IdG!~^MQx` z;llCL8fwlI;&X5ZrqHvFCleLPAk&BF2Qu2q*wnn`38Ce|lS4jFTI^;gRInD;#1lXu zo2hry8@9YE@Q>^;qV*)lO+dK5I`vG6DZ7rcc6o{_K7#FHj){bi>zvZz0->D-H$KWK zNEPTqG5;|>u_UwjKKw7%xRXkYLuyxv$h!3i5yj zyY}wp&@j<0t=W+->?3t6*Z5#@!^O|Bu+{N)pD3lyGbPD+nq^9!8n&4~SM>u=bus+f zGpY!G!3ZPZcmviuvJ&^*yzJAkKwbhlJ3#M7xqfxZB0wJo`W1Q5Y3E6V$>S_i_3lP0 ze*FaZ>u>>1!jjIhA8Fsy!Zh{}U++Z&8&3#D&(qwgqv93Nm_;JTC=a_|IGYupX_uF1 zb^${GIbI;!yj2s|g2JKiEC?lb4qr%1Y8V>%*)1B6jt{7jN0SU#DCE^vzpOh(j*Q@iL`6kr0UVTz=kj&AR5)c4rgpA)s+7dPZ;`Xl|yG>Im)r~ 
zlj8RBv`mDE%@*fmMLY?rcps8!ehj*#yML~*Vf#EzvN+9q15;(j| zbE=Cu`sv$1)lyLy(le}$LwG(Ol$U19%jdW~M|`C4OV-Uc!r{JYr)5*);b#P4hP_BQ z+DcNy5Wni`YQXsLQ=DLC-HiSe+D+`-=EST|N&$|grk_{GPK)=fjFqs(fvG}OV!4K9 zAo2u+3C>!(ZAJoX?OpgNMLN^8iN_c*#!Tw!VYOJJBayZ17nsa1#n8tdt)JVD)I99) z4ER)=`rG{l`DMf2YvDCvueL()-4hVr@Dj3n^sSvhPNR_eSq$>$Bm{=MvN*f0TO>1v z1dC%1ka?D4l!1Ozs`=w#OOketv7Ll;O9m|F*8+MY^P(6=a&vNui(O8u(o-Ly$e=?} zsNH|-eD|MtI&QMl6No{uDs1{_x1)n9wbE3E7+|^K28%kaE$;o|SKX>gUf6m16c|ynngtx1?O6)nHqWoC;X7Bw z)$vYN2)XHQXc^BLR*8f`)z&v_YFXj|^y(O3s4H^T5)re^Ej|)c=ay~R6C)3;D=V!V znf|%>g?A4HAk+73l~6ExxmOQO%Fp68*wrC;#@w@e6)V%Y!Vtx6m3|v>t25O6`I5ny z&%^=j4L9Oy$}Fk#0+*wsFC5dIrQm*{#ZZ-a<3HkjFXw`D?Ju(&Jobwc8mMiRA{TPi zZ`}!@;=!UbXpYBRo3j_2i8rA6KkJmhfQD zRUHRp&b!{fB1BXsmsLl-SU+W{B_~gst(DE0+tGBjo`&VCO`{0z$0}I4si>*ri^dDF z>%{_!wkkDe07gp!EIe9WkT(vH-S+glVR3XyT$jc)Q@qfAW+)nzq2(Sn5XPu&(g$sl z>n^#8rJYvWW$_1c3kf*HV3#90R63x#Le1m7q9obxu5c;Oa)pcgjx9Gyv;i`VY&vGg(mcoT{zn69$$C=K7H0o1tF+IGS4uOW(Q7<$ zDtq9q<);E0Q!9mMS-KoW1C(iUh&>58KXV{dZK=5lHk9)@v;*s>~|c&{*t2j_@zZ}XyUZkV=L9A)5!sXUfHb!(F*TOYL7@K(!v z*DxI96W5!Tm`)b;P3rvce<#O=qfW{N388VQD>(NB4qoq$w0q>+uM`9#Gk-G3omb_I z1bk0<(*px3S?n0*&Ckg7qebYZlJ&mEnVDZo=-snzl0ZOJO4l_$a576DYn291ptCH} z+hJ}N_NxqOaMN@#|WF*b0j~~7*!=AE~ z<0KtlE^|1ayQ(@~+p1HzejzE^l(@29sPW51-{$QG!|@g?9?PE>UH zK2L4R%1SvMeL?ZPwM%-Xaf1>r6(Za^iEPZ^aB93aa$-pm(U>$b+8J!KEU#`DKvVr# zXI6;amBeSMRT(xg&5Cn>ClN&27aM^4?uD^`&!@u#6lHh+l0^@5<0G(?UUcMmkD2f- z*Htp{058~7+wg8%58%(57ZU+{ZF836g-XJ*tP(@PfbFb(&f)1}>oz7JZeY9XUs+R_ z`qu3{AXaTF0?m0!3g1!Q0~*$4wK>a+?t{kK!fUyo>Qc7kb`V7IRpnC!&V|yA?zoz# zSw@~Qt#whP&|ORn+uo(_PAjr*Y_DbgrVsWEneGqS#?oQvM(7v-R^{v;UH#L&;$NOX zMS&4%(*8A}jes>Sjf23n|6;-PGM8NqIxkmxl6zOLS=zBxSmLX@mnncG$!AYVA35%x zIi~)*Jwarto;PVTG3}f>hZ)DatD{@g@q5lHcZs~1vctfS{u_dshxD%?TdnHcKWAox zyiQwx`%9UWn|E%f4WuycKf|QXJFP~>oL1HT9bU-h=%pO9Hhj2UP>kqgyMx$AQyAsX z@|Ck~wprEj89S2n;h04mynZo!O=<((&>3QPaBdE3C;M<+)?eG4+3w-H6AuVf&HdN6 zjrF=~I&BVYg8{6a5kTkal>d}@Wh=rf8pxHU$!h}4PZZ{R#e4=C?)wh1Lt02{$zc&d 
zl{HzBuM}bCJR;tp+i}K3w7k=O^SH3Z-SQXe*^!eyP(GLIx-f50(XXc9Jd-uK>(k!f zG?)xp3iup-z$>s8sv&^C6}l&zlr*e!5Z*I~N98WjDQNzZx@&G^+EPxv85^4;KOw|0 zS2-;vf}e{H-g$UX@D!)|hoR^qbA6}z!c6TUpBYLO=hof6Sr1D7SlK59bLWEWq$9?M}ab(z>+g@17PKpKs zI?@s9pVlp(FfQ$jq|QVLq*PB(T-XuK+(`m(8h6_ zWx`0M;S)JL7Ml+~3(M?ix7@d6^LL5ICqYYu2g`kl4+n zk^C#+Dz|1;aVK0&RQS6UI)Q>VA3Ka?ADiULfW5G?DB$fg)@(g#LkE$xS_CQLlx}Gm zR%uNyC8uuP*x;lciZn<81+r^^62)2S!P_u};5+L9%sMy6b%G=a*ZiSs#909n0@2LE z`&r0T!R7INpGMHS4RYB^sY^)|Xnw21Oyc`Mi6Zt1D}>ZbAIi6BoU-%yA`voghQ?P= zub9WFNGcMU#Jbxibl@+$LbR&01dKNn#|alnW1>ozf(j%!_^Gp6VdR2jY41ib(w2)G$^k=bl^hdj$%@j@{5wYhl*uE@ za2AM>Z(UVB*M`}<(t&Xr-|4`TC?QAzK7V~nnMw6&*-Wpqk1-<45H-U%QyPwQzN2)l z#hLlc($2lXUrsi@wM}!bKPIhY&eGlgo)ld8&iW!-UME`$)^&eEdJkoYZe8-50L9Eo zCf4y{s8#mbaV;p(BX}W>u>rk;qDnSQf!>nz1dGf0Y-}XX97tUBu32Z9$Z&C-vQ9B`0W+kN|O>$3$8)tQ^YC}d#?}R8Fc;n%-zpyvieL@~Ms=~+b zT@M^pX%){ti2GqnmDCHT=(28JH^*2%LXC@`4c@Kh8-78~d}LSh(uT_FkysOW*%-L{Nc}JsTBikguBE65z?DPRHg~=SO`}-(y)u0X$ z3Ju=2A&zU#Pj6~j69Cpu>JmeD#ZuG?K^*BJG6>v`FA_(`zim4|S|;)6$^7eEqwEf~ z&pGYy{;tgpxzh&pLE=KFZMN@x$$Fi$R`^HISy_wthdw626T^_6?a?V37LAra@ z>TWz9e*}98AO5qTe%ds7M*$B8c~z{DYo%J)m1hy1WIUo9gxyHA=wc3$D;N&s(bRoT z4D`CXZU*49)s$vC=p3)RmQ=%wA&Tm6AEg;QySefm{ou?#KTQ2#0gAuPF<<-OrkM5p zMck%Z-;FxRGVD4*J2;CL8B<_tn1EC=vTWDmu48q#Iy~7t`cf#1zRAYJW)b3*5EG@g zZoxBP0k-bW0Sp}Tn^5%e^H(e_^QG0Cplz}HT7$k!vo@P z*KRLhSS8Ku_#MEFkEzw5@b}shU@q5Fn;ds)XhL~(R(C-;ne15Zq$zG$;&Id91#AxN zVGS2pb7&vsw{Q1DHQAYI=xwn*qyfQV1OUZu?wK1!Bj;?#co~Qz;*GF(adihOz}Q(; z!7x*o1Df~?I*js-G*eBXBA~%keLBS_Y)f*@#s+!03eyMMNOzpZ@1j+SOvWoiA>(ZS zDW~0W_xy-q1ABGHl09m@~&(DBqDIUjbTv6|I+orZ7 zP8`+vl?PR7`R&j+wMKb$&j!-qu$3yhxTbE)>(5Ol+}rNVFGzOe7(>{LAs4XEj$+@% zGNyIcM7w4qHY&ZB3(IN;Y~T*tH^;Y#oH-qOU2QM`en!Al7fYLg^oX&nvcLwkkK%Ds))cT3zS=Vuk(KD}l$kqs>5!hAPft(N9*S zmT5%kBlxU_6}Gy-uE^iDe2BR7AFB1o&7$xM^nxska!jp^Dih9ZPp?jVtM*W%jISTWdoafdXF>Z%-zUN7g{|Nhhj*Bp3f3|!u= zLLHyrC6u&uSLZb*(HFU@NmsBs{5Ncn){`U!&ti;UzX<%0N-&uC+r=kc2^mxXeTdoX z8Jdj60U_{Y3tfkYs-(G7GG*C%k~=1R{5YkqPHZ@^?c7V|wcrbxvO@kFm5f3_UDb~M 
z-)4#I6W8xA-#`6~OLKIPFjLQAD=!GG6x;GsezWQ{c9) zxmC4H8fv>0z=E#2qe`|eIf*ol9FoIalcOr)sH)zM>_qA+HvC%wr<|p)a&Qt?l%vZX z8Mid@s`(i3#-Fm3=G-l$B{d-~TTES89BUi{>h9e~UF96Qp zm#btEf$6dXUVegvH*KpFDG`y`4bxO=|z3TaQrWa5y;tc7GCcmmw}bTI2GmK zMXAR_vfW{uXCY?A}&S%A(yREoR>+(hX-*Z{!N(Zk)SR7sdY*GjVF zq}Y=qXN|nXi=-}&?@k+XQCj^QmDssRmlEf7iEEq4p9)gk?LNqwR{;TVJJzJe$1_w5 z|BlwFadNt8?~YC>F3sFPjG7szVSCYO!So%;XVno50AQ6THk^EY)a+^z`i@dcA4Ub( z$v2HPRRj*($UCi%j_fI_+=a6PhaYqRqj z`0wlD6y88vZLas2at7|e2xNCxXY2TaD{z>Z-a%UeKPBsP2}aOzZNoUZDpT(-k{NO~ zR8R~D$rAO893!1e&b~zA0lPGw1q5_+lav(C#iS?JG|Br{(MY4H`yJEDx-^$M{yGdf zySm?x)*T|Fl9am0kW5;KpvB8iD5*g0;%LK@%oon=U!|e6xK%J-nn!3%%=~Vub0;XH zI+55J(X>>afDFJiNEzWZOzn^WbR@0ZBx(d@Yt{EcET~zRW)nLi?SZytqj_Olsx?Lp zOIJ%`qG;n(q`5q}kvY=+87b!7vDe9SdML!?)~1#w-{Y)5Nl-qwS-088`5JH!>Z$8$ z4MrjBSRBQbLh5TW7sIWR{hI?d#usUpd6i;^xTaFPjH6|REZDkPpTIoFd1UV`!MwPc z+<6)(*xHppqjWWaZKK@P3+7-uQc2h@NM6Z))5I>U zxwx$eb#Y#t;sLcn&KZ>O`DBe)r-7u6N*yH`z4FAuoFYFnUSigkYnG~bzr-jZX=_q) z2zBKuMclKj{ReyAs%!nN#QjG<$+Q>Tld5q#+INUCsxG4eER0ki3F#y1E8ZXqNU6Pc z%>cAZ5&{IuTX#HVnAO^=02YUNA4h^rkH=Zby5GQbrLQ_a=5xJ@8fERCPB5%hqoz5E zc8-3ulv%)uWVGLf`bITzzmU=EXi2VVNY6@2uR_t1i_M85zh0GFc|1HW`ihJ}1HMsF zg#uaR6+8P4J+4+485rA%Z}tp{Igx57f{C5vp2DJAs*M2#p=CBHDLXa1U z-HDK#EpGuBb0}aaQBd#ZIwP`>@m6J*SF+RHN?W9v2k2nhGuS$W0jN^=Ck1Xg2nvtD z#Vy1beUeiS=)WBKehw?@ij$cpyK^uc+FeuR1O?tmmS-#Ujl;#7z$w!G&^{dC$+`Wx zQnrc8-a`WWw#YC-8iYB?3eMDJa7sM(Nm2xTWiT!~x1o-;!!zzNk`+7YIZ8%Lw+k^w zFeEer>`>LRUzv)w_VJAG31_%+nmD&fr9%f>1#M<@<@i)J8awh?U|eI3S}Rqw&lyWxzi6WD&2uo>;}O%+4mYe%C!Ed7Z`F&}th7YJy$L_=QwLrK=8m-#DSbE-jmT-{9oa>;Rv(Zft7_zrEZ>;0(}ufqCRsyVXux9XzkOgGv#zY?MEo+=ZQUfnEgbJj z0p=W<#HG-Y?Q}MD_14XjAA?b~inf0vYqVD6{szNcGpD$A2vq6A#wEj%R~GGX7cM@R zr+OyRff=CwEU~7&Tffj*v07J*kXD2ScC6hbJ(1udiH$%e-zIH`z~d<`PlzB6_lz`# zeBY5FKTaWCo6e#}+v&gX~ zxAJ03+)e5g0WvREf)6*sP?^j>31Fz31fobkXC3s2q*81WR&24;R5dcoHTjAnx1#XX zD~|S>%3-^dQ1jO`)rd^YldN?}LM;2;I4aFC=b#*M$o@b#@J+IYr*RLiZB|`*^#3vT zBq5x5u;f^qv!48677))j>yjlYc@5bCZSmm^p+SF<&}TebS|wcqyzSO!%$D}%;4{AS 
z!*c2z$T!K|xc=vlzJZ6w{m5<)%XRWEN4r0KkLAdc3chAvhI}b7IM^P>(DLWk!gsnB z9?pZwpH}j31mAA;Vaj3-y-xuuZx83-+&R7$s<>M$S>f_9io2}`v2_te|;-If7^rPqqAz9iaAf7 zGHJ=5ay|&8FJo=ye>3msSA6O6FMIu(d*{5LDU=1@RsHsgF6z0fa|NH>Mpv8YcPKs6 zz4TmF<%xb{$=t|a42;zGyj&|%Rz@CeRhu6oWV@+pUr9y3ZJWB3ryW`E?;C8PTT7w? zdTYSZ@27|D`oh)?w&&I#HT{jMl~{^h%z=nx9@8h^#^oQnf5fAIK!Kkh=QhDfIO%1h zSX!;WzcY53t0{p{2~ibjfPB!#PRy}uT-;c;Io#EQp9YpX>Snm#Ti z`jjA*G1ugN2EqbtSrsKbs;jPXeL@OG{zb=qRnFh)GyI=&Gy)7w!`G^?yDy?Umd%~5 z;{}zgDX-?ufTV9(o5Kc){B@*WI|$+h0ctz?ikV6F5(ZzTsy-!IJ1j#Zy+j|Xb^t?h zYu&+*zrS+7P@AiK!015@GH;}c%{wE<%mq+_*UepA$>Y}i<-N-ePn6srcOQ(GIGBfR zs;t`KhXROCQjI))O0zYQ0M&=Ok!g^gRps-76>N6Ig{MTvN1YDE%$h>;R)BWgT>%?0 z25YuRJ;g7HXaCnvbO}^9@P!cLMdA)5L>Z=8*thxha=v{opjqXj_L}|t%hKc0hJ(-} zV?%|f-O!ddnF|K}T&tNU&~?7LKTP2z&9-~s$Sa>rQDPR5e;?ZJRnBM4o`M9(q*4u= zzp!Utsyp}4%6c{J*g=YmF;Q2Bd=a6VM^@$gUap44u$}LtU6y_L7fI4;Lb>1Vahctl z8&1a6S+25gCn!VVM`yT!mvzfcvKwLJ_f>t%Gr@snlXnlK1+0!|OED~KGS6;LAgPCk z2~0v<%fxz=>_#}3?@#e5&%Yddn2Kv}=*hB%05?o?lI*Bx0WP!ojwHG5l35E^O)^^@ z{NR(iYOXxZ+Dws&UK+{L8Nms!xHE*NSveaX!p{PpHm+rVLGQPnLe{0~0_w}sTtE36 z#+aK;`5H^a+R6OspSSO{Crd4(Idq3&q_b>90N&Xg_{`exMGl?dtWr4LD;-ta=IG0W zE|zM}6AUQ0-*aY2fw70lJo)lxeZw5c(Th6@;~};^2+w3mEs46fFI)(>Wvo&zLI%m% zY@gp_{9*SvEUrR{@A{%Gy$wdmRCNXNivdc?dLEeMRN3v4)QVel=JOMBS*CMZVE5jqR>&K%fKi@V_FzgMw zKWNcbNnM6Bq*z^V2bUo=mW$uVSeljI&Qq=LL>q!-+g(^&R^H!AZw5}<;XBGfRo(m{)PyONBZL5$ zK}*xMKLilN$&GdMxIRYHEa<W^O$IA2?p@HSmH5{Y5W*1Ba$^MpEZxIuO%ueLr`X#@wW+>Iz3Vqa-U7;}niJjape zu_}bL%<{|Hk6EY!vohH{58gZ5V(CqD90#sgn)VY>6|VemU<<&GEM2>rC$(~okjs)9>CyuE^y<-U{aq_Y<|o*6JOgIQpf z^=p(lt(ZqI30muF57ijtR%gh#%o7cMkvh0o_rdsfR6G5J)FxSuoWOigO|uRn0ss!( zS$_AIr^F?Os4ETPYPFN*xJWB4OKu4;ZMGpzN8|C6aiaO_TgNb%+IP95G21(|*=rac zB#p~=$reTBIH%f}h(1njGn;6aCqXM>ZQ@~cs|bgr(_EalCN!$%hJPa>PnO>Eu|cwW z&j&J3UVh2U`OXDXoK<%NSYVM~5f1Jq4H^HQ<9zlm5Dpu@`(3s?!$upjt&7xwp`)ZW zQh&SsHplMp2V@X!>&a9-|4P)4I0^3{8fHq0a@_!n zsrd<89_*rfcaSl54l+ybADkx|5Oas4AqjXCr^P8TXMA=3+(ZKDeRFeRfQ(DlBBB@p zQ$E)>7A21Co{f3vUG{(RM5G$h?s>;frm!&&Y5f$b_bYg(nst3GlZesd4QKGm(>cM) 
zkp4KQ@R|DqvygVZCEjqZj%Pa^B8b9lHJ`4G#7G0SFdw~0g zwmkSM#Q56Ql!sC?$JLpoM91i2%E~hOXT|(&bxJiv>(sMfV3DLHI~fkp1*&7{hI_ra z6L9aWqj9aeX)YN8uWE8-B{-~9E1U$ryemd%R_hp})F+kAI5q7oKw_gxG95|?k`(UF zvSa+22~lj40vHZJHpha&q(Ub$d5N2`r6ortiHp_Yw1))vDWviQyIhL{<0GVXmBhZ^ z=0M9(72na&#Q(gSXP+CQtJc~AYLK-ciZt=W6?b112&E^4- zJ6WMAX-)cD!-%L^kxzKn=n?1*)q*?Cy>?mPPr4DVp-oJ4$p_aj>XO#8ynf+xD$>-) zJUiZ?(zEv#=8;Ri_Zh?4Yqqp}aAR=H{Oc5Nii7+X6K$OGgrb5h7yY;&Kt`#obAHko zzNjsKg!!zoAbW9^9kCH`zc{uPgt+KWt6d7YM%c#YdA z4}Q;zV!M$oMa!uvcdMA9P#nn)W@(!w7lVtoV}cNzrf`xUvAM%IdGiG@NA8N#xRYZ* z*q0@LZ&Cu*F zvQa_tWM^|8p=8tDn{^)r&(th~L*|5K9H%#gWk}fJWs>UlK)=>W6Q~;*YuwAgmNaJu z-gsXXu<@E<+gsxwSl_F&jrk20%Jz(5C~Tj#ES6FqR)fmmd8d`eHS9=o#6>wX$(h{) zRwc8|ZUsYPoAhHKFVb#97~xyGw1r&^0G6J0TU1JULuf|X3|(=F-^-Nlx?-A2Et~ck z0!frbmB>Z4SqUp-6Rpx}4XHCpI@f-mbxq)ti;f(GdUnl=QQ+SC%RJNjGmOujww%=8 z#TB$)gf2L}eMc6nLSwIAWKneI^74ZAp3Z`k?Icu4>Ve&Z(x^Hh?~N^(YLSaP8g#oF zsBsISnznO6g}mH5;9gs-*c=?dvda^1uX9l951b~vc-GZm$yc0b+{*~%*7_qu4nsXj zL3j*twp9_>;`JwkZV8lCOtW{0CN3a~m2pC>jkCS^9bxcQj<|sjuV!ief@W(n?F@>C zVhLN&NU4-mP8d)Vo-=Y`r5Dwio9^1w=}Ivgx-NO4%*UceuDd8>l0NOqT9TSidQFm|1kjISDgC^CPrzZ#^)&2=ewqOId`mzZR)JaczNLZB zE2-us%m#GcCW)G)kW^FpD|ufz;U$V`vdh7oXsa+wB)(IGSC|)9Dew5~@RJz{w@UB1 zTWohyrqY#DLEC*@Crc~V3aus$DHDu1+#HzP4E#{aR0HiC&0& z9dHfQCe>C`(vUycT5*Y^({gZhW@Dzp=xmvz8^V_ME~hEHQ-TuxbyeeLm{XKzsK7s^ zcH7wBD^XNaGv&>d;?2w`>x?D`rAtvc93ZPRnT#igCYkr$aSmpw*&1|VG>NN#f18yl zaG|7+YM%Vpx3umlIVqOm_4|)YI0~|${5=+~mb_mILr&pJ3@icy9@gXuXz!(uwK`8oU{Iwox|@_icu+O#)TIQfTja%#@>sVxd_2PibFQ&H6M)}Iodm|US7Rflt8a@F9zBmdM z(bRSFXiOJs4vdLr3NFO+h=-YE?si2TTFK5=2j!d928fZmLuq9z2D0j^aGk}4dqFyn z^)`vWWbVxh7AMh}Z;)k>mXX|DISD;^ztG}mr&W3YzRn9AKF8sm7IQqNwmr5LU9&V8 zi%xIrE*8FzMb;(g2;`h~;+|gVPCp2Cu|r5t%2?tccgO#IOG{C{737FvKIYENnw0Fd zlbrs%zWnjoxNk|_5(Z!iDkq~GHEFy5$Yx2*8Pc>JZJeFmB>z9=Pboobp}+)lO(IFC z_-ZO9y_S3JAkAKYL@HAmUuK1u956LGvglEHTnMg?mv!c%UXdPd?PXe$#H~}6Y4c;_ zLoi8J%%n^HI#7Qy*1X6BaB#e;?yTD4`m)MLN9Cp4GL(NI2d&hT*%}RGRb^~)z>hYZ 
z(kYyLf}PF&ak$17;jL_z+c`@UwlfL9++16Tf(AT|ZR7J>h3Gu&zs|OTrcO_=Y$TGu+!+w$~w_EZXwp*oFxX7pN=Wq z=1k{b0sE=jCM9A(v~Y&|{en_iMSia&Io*;h#6c-rWKJtdPA!{!mOa&#ut$@icgRMf zLfPGs$gJq6?Bt1)x~bcUDGJpcscn`<`>p(W0#hg3>qHx|B}u%Q$P|8|47ZleRq#fj zU^SzQ!xIfYq)M)3Us$L7c{eZ$8b3K=I5+y=Hem#A$G%ukF&JJn8p+#QODVtN}N9=}H UPWKi5KmW)7105h={#>9C07z*^5&!@I literal 59986 zcmV(qK<~dFiwFpbxTj?T19||x{o8Kj$krqb-p^xv*MA1+%MWzUs|*G|lvI`~fTDJS zrTkh`gL(G}XQ-;AjdXKG+OjA#HqhMwGer7fEn9Z@fBk>|=llMj|2;o`+yCeP`9BZ; z=g<^Om6&z~><_iqpD%YQD@`20EOnn=m}hU7 zSJHiOQa7iG1ancXIjF-Bq%r<->o>tB?JEC4Bgg_(xtY|HygW|MQc7 zWGa>W_1*6}Q3vj;oiD2fEXq`Cp#FvZvZ4CNS53^158wI=-WF;%wP*KV3;FOz>wZrQ zbtXUPH*^kMet9wW#Z=LIdFik7DJOav_P-~$4qHW(KuHw29dubONwr+g@!RtW?SA%b z@4gj>MQ*xPe!LgaYuU;Zs$cR{r}zKSm8pp;)3)plEqiihf?|fd^jdQiMBUL*P=kYp ztf||U0!IzER!*UcUP?yo(aaXTCMtT5Z`msw`Q|98+Kkx0pvj%nIv8C8b2%5mJDH7m z_EUoE#L`SJ6xp6{FTJW>`fHF@{~J8797>m@t6jQ#pFdU*$VHupFX^e@gXY8?Sg$Gw z;3O){*DM1;FK3mB&Z_hCZvmsA)1|gb9Q@2K5DjH#f0uhsHONIJB0miU7bxGtp>vDnR|LD$ zE=7OELPH`+35lSO%V8o1Kwl6*&mLb@MHaby(M3a_S>cdtHnvI!NUP;#wtxPMFM$7b>`0DnMcF^3+qHq{Vw3gz$rNLWeQp7s6)x;H!C z5USH0C>g3(bHEVb@GRu-7X9iBe{*ywDytCTcsyy&{2fpx$yEZ`7(C4m6~laSkYu&N zBbvdI^w%e}OxTV!${>R7g-uo28#+tq`75YRX^<-E$aErF*NU-5x-8YyDQlKZ>5jcv zC7W>toGbc8gR{xCNQcYCkrljSxq;)qbgdZSw3;mH+O3zA)Qs)hMq?*DL`EZA$6X0n z2ve(_|5Z!`^6OSKd`q0HVS`;Iw=yp~_0R3$kDr}gA!c@EiZHM1pjJlVhZE+)7PXR^ zw!Li4yWX(yg99Ww+1VI4>inG zsdByAim(sdK}pLsKCL_MUN}=_w|silSq^AOF9&s}pz6U$l&`(={@zS67IWy?I*WzX zrSwW$3nUy7Ovmsx*~Gc3F5(1=H4?SjQ6WgeD2G-#-36@O;DOGMpMy*!xdJjKjlOk% zGe;pm>15m@6uU5d=pk7HUlT~&o#|uf4_wi`pSCfTRv-{~@X>(xV z>LNO4Hz-}?3gy3zIiy@Ckw4sPkP5DL(AATA(R4nkuA8HQEh>#QxXZ|yDX3!ED<=}g z=Jzk(9{zl}c@}HI%KQI_e+CnKL;m$A4=fha*kS?w?DF&su<LG-gnvqNHCUkz{ik6xHF4Vz~uE)XrFidF}f7K z6|qYxyEMxk-a6FyMY2ORO@|7(56pv+F?XbTk>id-mSyvYFUp;ky?2wyQ&@5c13qOK zQu%Q69yG=B$bBN(QmFNfwE_H@K5#B$t6N2Bi>(|Ji7fU3T_b@sQGGZvpdnW`Kp^5L z-0Vl^V1MaVZc&R`A1Gg%$mK;fgTs^Yk4G16nkfuY9gGC4S z(S$%lNh2X}(@f`%AD%?N8)XAtHqZO78n{{_NeV0I&bP4h66BdN&zIx&vrdMCYBrb( 
z)qjTR3{-mzP2UrD>{eaF)&(CYVay$d4@o*%fS9_| znFwjq91uP*9p-0M@eV^1oSIaMk@#%Ja(B1(cA$eP+>@h;9t}J?BG=8OSJ4C#%BEPJ zKu_GsfxsxLI!3Ndl#x`R2sm;Lc@lxZICzoc&MB@TXiz|s2>an>m3j{vu==FJ?=&NM zm?X?y>)21Va3`E;&d!XPGl8UrCDri>XTyP|4s=LoEO$V|!hU+j;%}}}l_ApD^U|-5 zKVFdN^$eVh3U?25eI-Jn%672fUCQ=uuz~AHN-~fd#hyg&JghoS0gTLAsL7~FMx)rq zn;eu#Fqlzh1WPTKG>TcoQovr0q$mZMYzJ;-bPv^3NYO#jF>hqV8&T~(vYI7%hfs0w z9^jg(bFWIyy-{%Q%nDSBNX9E-mJRF)Nf}y|snB$!9!X;}i1#Vnf;Q259vI`#Bpz8M zXVfTHc;;@ft`l+L8a;_>8XB6@B=JGkUm$5-qFV3hNGsQpKL2Nb`TZAVdDB$re_>!o zk=W98Y=RcDA-@*EajtZYZ z9`WNJ+v7oM$Yl#>kG}vEy7b@BHdwb8GX8v+kyI)t`UN$ct-pXgFivLa2CKyJ`$838 zBA@zRNAwc%v3ry? zx}s*s8A~pwIOe6$Z)k^=YnXz|j#F;88QW=H3Y*Whw1Yn*p|PvOkzvv!<0}kxbNB{0 z(Qqy<9hR=YdaGRx)vgYI0_Z^_U5UZVnQqYc2os2AhiVmy%L=`O9zu5kMGGDcXhIFB zL#;PTv0q$Th9vD6kYL#1DB1v-EPNs*Yy5^DK-Zbso8SpyoWbH0V@bJwRzfqpnor8w z3s@NzkMDq72k9@dMi}){1HGLwT|b9beDR0ImM&FX{|}*I;e7_yDycZ!nWnome|#w~ zUqkaF@1Gg-WGP>2usI~Fd8V&vsMd@wD5VUdJa6ddW6%ewCzU>L*(uyN;6T5f@KE*EQ>42l-0q-E^F_*_H+x<(ci7>(`dqR?z<=QJT zF00j};0EZ?kI5~9a7-r~=WV6Q_?c8Vl zk=pi~vYi6e9sg=JWZ=Je3`ZcM;8F}FR1H-J;ISR3$}fBzLZA`xIEMy0#mINfsotw= z{{8y^a*nnvIr6v9Q@_%?{5i8n;|9h&eCr0SS?NN4P2g&#`yAl7n;Tw-g+ucPO<7Afx{i*lbEU_bZjy}i z-aV#IPw4^9R~#;^gQRFWlHv}L6@CQu<4bI4U@j00jHOI(SRh}#gTu5`hgpazXCc>0 zs9ylbuvCv#^%np{90(}ScLXfQZ*NP)Ja!S-Q;-;uD%AIWsjm3Ll37(c9kW|0`abV^4_R>M&C2T3e>#&`;sJqZh z@E$baH2dTM(gR7k1Ny0BTOL;`Fs|m+maeO+$`{oN0@YnA55$kj$w6Dy20;A?OmjFrq|}uwcAUgws;_-_ z6JKTnOq}!h7(x0q)`7Ml)sJD5PAm=W*M|hJY7T=x6rAhf&3mwc#RO3$#bl5cn;h8d zCM2(K!0W+bHi`v9ZXpRlnUe5?EY8Cm;(Tep{>GfD2V^ZZYzI{VJ*ZppAkd-e7-au> zh`A%G%)A&~+UB?EGKEuvV_!#YbjukyCA%nZ-$uUu^2}xEoBQk#FB!^#!NOE=-?=6L z-)~Q0{{)JI0M!2y6%+?XjIdT{dQF%`>r&$;orCF?V6EmsFj%An1FRHsxL#M#B)N!E zo?!&D1K%v^2xkWqzX-dDgJYAVnwlN6;(03MKtIqX79N*IIs>2GY3AcNPty4>io2N| zpsg;Y()QQ1^rUVmb99Di1YP?p6 z2StALRfVIkBe?^>am$q7qnYMWs+hD^+4@}EV8Nqn0_a@$KoFdKl7G{^Y{)ivg?e;s zN5jzn{#HCndSFP!$5-XPW+0?6n+uOt>D-&nh;}sflQsApX<^J)YCXRp}D z6~%5lkUn;(+HgVo&(>c|E(6NrNUGX1DgN2lmlqF#UsdY+Hsng+kpGl{d2;cQ#{=b^ 
zkJ%!k=6bN|E(K;4;>>MiDxH{SXx_E)Y8{ZPj}+-8{BDReY3s2{wPs{ls2V&1o0Q|S22!J1)fuI3?P(FVr)^WqW%5due1h)( z4g?|9Wa&7!`to;rKfQ$};L8$m(TI%_VCBg0Fn@kLxqZdee}5ZO?dC`Xkx{ek1Gti;vwvPP_`^C zBJGp#FE}6_A)7rne-FLrFJBg9!)P zGB%y(6_0=xU8;+teU-+ic0sG)(Ogw!bA_9ACp<|CC%qY0nACP{2eFj?uDJd5R1b|t zqYXjVS_cp`LK&wA4*gXMgC4{Bf|Q)^>Ht1eH3Qpu2XjRn=}|rG+L54~4~0dMnC9{H zU{U}DZMf{?tJAk&^#NRAlt=cC@`zI6;TBFxRkJpsXTz4bbTxKOkWRV?>9-48dS72> z-r%qB8hdXz3Du_|MViOz6h3Fl^tm<@(hYlx-e`l6H0Y9j(j7B;K*#8ua!57Ab2^WJOldymU<~L`rmc_~ z9bJS1RjL%&pbaYxCTyei2ijOw(#9SQMXLrE-hru&Cy{r8M|cyS z22>aRXVnov*L+f<3p>j3cIc$iNqPapwxd*6hqoPRi5wiuuP{lK;v^e3Y~VYmM|9-1 zlJVVt4g>@!4rHJIzb`lzr-tgH;R9HSLmLG}(e9o35!j|oyFa78Ex15qRfvqOkk4PL zhyG26PX73^w5Z$A|H5YNQhLj|4Gy!E3}-10CZQ*x^Egg9<2S?i2vP5p@*#2gdr}iU z>hU71$9H50%r2z|G z4mR=M`aqlmN`{pPyhIe&++L}KeL@^x$E=_XE5V`0GaXL zn@1u9hqK5h0p2>^3c}0(NHpnMy-)0=45`DpI4mDQAc5V>fVa~vn6h0+ND#^wNr)(T zYt)?GY}4)0O+5pHygIww{0Ku2uE-KLm}M{K4O_F`M!d2cV>--Xtybw;^>bvkTlt!) zI^am?%km!<6h(ZeR)u&@yYHcanK@lHzWGqS>J-?!M4#iX-0if(26^5Dw&wXYMs)T{vI5 z0wyK|Opeo{&$$n?$Gud=K==2Adg(OVy!@N4Iy+1Tf^swv`v_Ku@fUq~Iwv_en`Vcn zo9^)|QlGla6z_y1u6U5*T^_X0BK;_f?yKN5CLWaAGK$@FEegLILT!q}__)=$LM$H} ziSxq`)!_$FIey8|y4rH{1`#yL)s-I;`OG@Gd#0dT^IeUFLdNw3Il+ z$9Cwr0V&>6or=qLfIJ;%_F8Kq@5c&i5hLAsMd8t99k$$hB;=#wQJ=Se0}H2kOlOcJ z>TzG3bhjPMMB!(3NyyU)-+)oeWFC`JC`10Q`q<%MHz6nWz&wPI$GI=Vv&@5kYgS&^ z@RV@iw+zsfoXdwxA@TSbB0(-wbtz&4tYXuz>f3vB`QGS@zUB@n3e*SYL8^t$c9}mt zur+Y>puH9y>vfPHn->RIrRr$r1ABM+`AF+DEopLS+lTpL=5d|+I560+$o-Pbkyn(B zyPqJ^?%{sfYksS44nMG^U|-Qw{Ig%h#OMLcpkt!AY~rfi#9l@boqj<$^lS5={&c#e zTh-FNGcM`QI9*tr-R!#+TzIHD8Y(ti@Q~mQiu#l$*PV5ybknd7H_1n3A%=jY$T3CLWgoPr-A z;R#91suMX_I!JsziN|oJEcPI;bMfg$bUN2|KkII1=;H0rE+3(iRmeE#!>KMqdX)6! 
zaDYm&954_m!AU=p)Li4ERqJy;JoJdNVhiV}MUP`Y-isuCjB8(&YIQJnaX{kt3?j)K zZm=Z0!BnQjy^^;(=?t^O^iDdwgX1F)_*%IK`ZR*-a2Hgeq6j9O@qpOF>Q!YbgWCG?eRlb}<3)syc0ky&p)qtDQ zlwwnA{L?HuT%}dn3|`6@rH$6J(v%3hr+YLEj{wQ0YwZoNN$DT&879=bL4{uyM}Y+w zH;W(~BWyUBfagtZaB=c=@0-;TFaQr}o-E~m;{zNnYu#1d2Fyuln}wzj7wr?!Y_gcK9A+xvxrM)F5`nqi_%+jfb^C zf*Mku)~Y+p4l|?PaYNfmyoQlU;u2?%(;jlJ_z_lBj<8%q-t8B1_8dGBo`sFY!CMs) zx;9K}NX3=I|6<^VEc{YM+%boFB?@(Q=2j4kxXix818l^ZmdUhMkC3v_}6kUn;1W2JR2h!=> zOG;exQYziy*xR7x4_^s1t!^FQr4Dl46?vg~d~CE_9Y&_Gpm7!r9`}&Y%k>2^`OJd? zRFy79z*ggOOStN=4RRg}k6*~(iv!|BM|kv*FefFq7{UOvi#`z5-m#>~gHm=+x4uST zGohvBhCSS}3osv)CT>5zSQ)P zu`e)>5I6N{z1!S;AKVuX(wzqwZ0XS5X`8!zPhYjYo7WH=D06-MxH|N}rZivQvQ{tk z427<<3qe?{$CmA}9s3dR5+h)f=zZR`^8yDQc+lIB)6C*dG&EtE(TY zr>-9LMc2G8x)4^Ql(5<+?82bhHbxOnQGR?`2K}ws^e-)}{jbBdEt?b^COmg@?_aZ$ zhPI)MH=j5>U1iypZBI1eGy4lrSL^wix2(18g05XI9vHnc**mm5B4-}H?A!3D!-I%> z*6IEmBv7TSRa9y?+;oSt0P0bEtpytME}D{g*Vb#-QE+ikHkvLUWrF}&jy{z1&Q_qy z)-~g@*g<_RJh9;@%O%Gk2wzUyS4kSy^&QdFTmnpwvrfv_*pTxT{f&>Kv=3nL7D6v} zQjVfASmixePariU6bE{76vhT#Z%weBXOF5yTS7|dGH-SpdbbVaSmEP6+CMOoN4u7U zqkx8ROP$m~Y0aNU&~embGF+vj;pdZbinO9yF1bK#-3HU4>p=W77aRmkkv!%UI)=Cu zAbJF5{=p$88BI=SejaX0TeOC5Ntx~7K;S8n?Wj*u@&l@!J*a+9NNj74=~WqS1k2`HmOR(nmQnoGWbANT6%l)KAuu_;NiY

    kxH&TvLa@cR6 z-C^I3-=rztUT#m-;oh&hiyw!~&-_6>5V_DB%gH+P=jSKV-i7Q?S9r#)M5U`w?AjIh zTAajNqwQvo>I)QK{PBQ|Ig7*L*$396!>jKL91`C(`I%ZpWf)VZVvrteFkEPIsIQO( zNKS>l8KHWmM2ChF&%@wV_9`m>I7&`X$a zHGt?Go81;N#1H@R-$M2wuyq2x30#fEx*aO$TNa1XIVf+K;|eh46@aaC9eN{-5nPTs zPduej&ot)S@pfZB2S@@|jxM(Z8c@s*pobFj7*JL`cDHhKLt0pLxPgRa`zXK8VQmU$ zDLZg?rdR#B55zgNOW1;cb^tsoWULpjUE3Qpd5p<|a!xkL5pe1Hg7nVEj>B|`4H-#3 zU2&Cm#RW-HQ}kG&U1dT|4rsIsPcf=(M>@xTFx|nwB3plt;?pT$B;FpfP+cGiAqol- zY577K0$Isn@`!RWqsOhTyVN&a=F_uHC{}XecKE62de@-|??dc~$FH6P(&}oJr^%-3 zhBl3Cx;$p7!t=kiTesl|CJL`sdf2YE$dA}$~(uHy%JU>l2 z3u~PWpkn!;t4G|In>j+f+3&{zJv?h6pgtRszGaRB2v}1DhY##1X78T=Nq7N_tgi$F z`y$l*WFgr4310z+1AE89A|CmB`#U=C$?2~+XpmhW#^9qKQxa940k-Qy^FXJz2h~!| zgn@>ziwiFdX@X!ZgU`)qcA8{ylDAE|yWLwf{1Ru73?(y%@fq;(xCVdU>e;q~a1h8X1zsqxmRNGI7q5>|+grfl){V8Y@8YYDAvjt;NhPe_6I zPY&(m`K09EtSF-^dMqUuf4^=OU4&d$0JbNO2B`}BqidrF^l5)LdT+bH_#2@LrO-%A zTs$5x=EN%*-}xjb=pmm92N$&sIU@1bWkNmD;f0a^1xJ%d$5Ev-C?$T@Nb*%ll79_% zfS=ZUQhF_ETkZsPjUL69QnY+NRk6>NayWoo5M5S|ki_WfEZcKVc$HIph9l#@3-{!~ zHc3iy{_ssVqB&bWDS7QDU4!hK%iO~ck``Ri>H1bYs&}H5BpugagO%uKFpxk0^W)|B zFX3k#5@3E9;a{AOf4 zW0u%$4K{Tqmps_U!<;;AWt9C6@XeB?nzU zq3SF|^2PU8%;k5-nSSy1syYrqw`Q4qG-?&%WYldBg4SZB-gkaD>8?$zud$lhHx3F# z?v*2VLJOJwnCN%M8*|1{`H2p9>VW+5rM!F%Ef22$dGnlNY-=c_%crLPkQvmZDKmcv z2)mn>O07?55IM0eSkkrMeO?BA8kp!bI0MN{3e@X(0z;)>p;ofiz$j2Q@>2p5%zyn? 
zxO*FN+Q<*kUp6%?#ulknRHU|JtD=UNEmKD!i?qmNJXs=SK!{H$S zu}S7W088Z@p`b+sm>f$7t{k?5LQ~T?(?C+M`{P~!s8-gvRjIa1m9*IMShz zdlR%|9QcZ6H^yS5eXNH9zwk6P&Rl6XlqXJe4%y>Ude!u+8%YMoWgc+14zjcjG#vHtl&8O{p8Xtp8~KfEOL_$Cng+ z$YZF}sanYn7sOccfR5?9Lzq+_XxLcW_qX#!H63YgbLVycDf*WSH)8-!D9PxSM%B@5 zkjB?Y_B6s`B7_C{7d7@IwN#Mz>cq7gIIQXrfKW>jne4F~K^f|QpaK=40%Q#eTxK%8 z#x*N^#}*fthWb-s6@vk~hF363C~fD~dv~ZrF3j#rVvhCmpXmvb)k`e;A zS>eS`H_Jm`d52Lej7%aY{z!1l9|FBk>UacffN@27gg|d)3Q2442H+r-d>@*tA4VZ^ zR%3@qq`QSuVhsUc$a2jr`Rsr|RDj}Qkp zElu(*uW@wU5c0^*&#P#D{x)(=Dl){1^hfdE1f|aM;>p*HSj`K$?SsnFV3=6pFta~y5#XN^dHzvRRjC=L8L;eQ6Fqx} z{E^7Ij#ZKC7)+KatkMH@@PXT(-OOZ~?9%rGGlvRQSD>PDBG6JdQ)%5cz>nr;0uZ`q zWiRKG((^{hVeZqGNIES{p0hwts5MNT!{SC|(7c22;7aniUR6Xc=qaD+&xpdBX&udq zza|^sF);ILIp}skiEOQs%wQV~0RtTrP*sN_ zyq60L8*~lySf5VSb)RK}t_{l;Y5Ux*4BZyIj8VS#FFZC}Cc(*?C6cNOr?8~Soz^3D zN5M=i$qPG%IoOW>Z5^W&)Rh6>bLPv9g3Aq7+yTQ`D~rm@%xN|_rsD8h0EW^{BXSy| z_!1j1Xd_>BhDC1)33I5tk-Vrn1)2s@ zxU)e527OgWrAk%T!<<3qMQxy`e#y=^Fd`$v9XTUJI#yED^uOh7WiZle{+ih%Workn z6eMToSc_4`Xxt!W$5`j6ZZX7DR6*j6u{;8#jOHO7)2K)u0l6#GSl<6Pvk7YTE+m~F zUxE#?@+^l`YmH0W!68K&2)27D&NduXLVMVE^&7NJW58|)OLl{rAxOcxQseNcw0yL< zAUT%_aV{%OGOn^#r4g#CFl~2-v0azp#Lulmbi!%(V z0_Tmh)O$MZWCo_LtT`oI@Z|Z}m@ZM|zPt}I+G$(OZ7_M^uj_SS#SkLx9obFQWT!?e z-m0GE>V|nocibX@W&tSHr9^-D+qC_w8k&kJw$#hnyNZD(BPS2 z%!xcd-tL6@Dzmr5guIz`wSDNX&l2c!PnE#5;?p9razcrk@ZdkFEY??mzIdL~d)7o> zy%7{3KY=DG%lOZj`QwO8ZqTNWyR24Z8$oT?GZ<5lP5|i?CNxNRZ(Pp$(}+yic9<*b zz-!qc+{O6;<4pQQXRjbEmK_s`@|-ZnOVWN1HbMWD%nl(SDwo(Nq$3O|)a6~>ZKQ~% z3iZD2u3bdRxea83kUJF>k~7N(mSrUEXj06P^3JgjxYQ4G%R<)JU7t59#;_qTl?Pr- zDT^?jjj`zryG&I1noM+Rs=Rp98{C^JW7lNPu_p9Uunr*aGHx|lz@q8YEhe2nn@r(i zy$MBf;9!_6QOtfyiCG4Wm{8}GOAKWx2d$0!J5EXU{(YLE-fEA!!^KCLYdv>S-sawP zLrT*TaZTC7na)PZR8|lsOwBZ*H--rX+}5@7@>8AJ9U{L?M>{oTdaem=Cpu?K-vjE` zMvPr%Ugy^(b2F zK2h%`&3rVS65I6lF})j4cHv>CI&LW@2yryA+AnZrO~%hMr6i+s;Ha+CvkU-s@+~(d ztEqE#ndtEH@LM-kR=o)sFD4`Q+x=|hOq&#BLX~YF)esc`PGNf{W5Sxya@~|%eiMtm 
z?M}@29*S|F4z<0pB*T4oELbC{U$Fb2sb5tyMRe2fnK(VBcglA)g08oTM{bG}sI8%mm=DB4#&#cc8nZyD8}F?O?VLFtoKIXrd-GMI zOk?5Xr`9Vw>Z_{o@Cqa8eqGrc_p#nHnFVLJ^H*_?oFX7i7jbI3C@Z@|YO=}vPC73} z^I#w6_pX?t_h_!IbBpfol}Iv0GDKPEXD18|eE!=Eob zk6LsHl23mPsI|)>OrJOJ?EWMB;ff7Ze(AGorvFdmul;YZ(4PR?&hbAU*w(Voo>BQ( z&FF9Y03+WL`TS<2*T zTlm!kdfAlt0|V7Wi}5eVlrQHgBlsg<7AIf3py3?{rs9 zJ&GmZ3*u5Pb@_r0#Uv8`Ep%m!UJkv3TO|zY?o6K%I~E!6jn9CeA2YprKxvWLU%v*x z(swv~jkCo%MN?_0Xo|z{y@bl*<9Fvp&23fjmyp93z+f?3{pr_C=N(7a@7rC zrqK~NYsxfSi#V)hq2CW;pe>6-PjG%dDe=eS`y#EI!#jd~2mgG?oGl)gK-7*}pw*Vk z`v;Gos>2!DRbe}P+`mv)*#?b{fIl{ere{Z(y2M{+j|QyC=(Fg&r6*VrEt7dnH(^Y4 zPSF8JfFg6c^4gK(fy1RRWDtAdkSp%`JvyET7^T7Y{OE3T=%%1hGdjF~$oO46=68Tc z5YqI{sIR!xdZw%O9tT>#lIFz2B>ieKx%>g9giuksVhC(0uqV`ydjOnO25`1!R@cf7 z0$%y0bH&D!6j}QVEzZwVzTf}^nPSls5a1USqr=YXQ$1txczoCDl)eZ6$KulBxYTqe zt?3$H4UC2sp4OxzH6eGuY}I4zhuppBZ`%R@0i&|KuOVED2qa^nQGSqlU+e?Dj0YDo zo_YOJs5QcRPBNGTS~$>9{LzRgQ7}k=;2Xk>BP=~`=3w^%NuUvQvc$uAXis=hIUtiG zd(^3_G8M^^>+DK?ZA9Uoy49hn&rnLS@OxNDJsBh=d(1je?!5!x6s6Se9ld=T z?mLJ-VB(vj&j#xsLmq_|D6`n*lPsL1c^u9O4slu| zK5j?wSzPFoRY;%27I(a&vCYE!o4{jE)=e2%T=D1~uOxol^yAAi7zbT8^uME^_P^}6 z|1&*(Lx=cmbMZLF0Xb#y!S)b@&%=9w0^GWViNC7TNyT|w2MV~9KN3=0pu;Le4jYXP z{5i8ZG;hMzEkRtBZ>_p4v%pLF+C*G84_^nfI1jA=^0MMT?iG;|LSBbsXP3gs^H2K^ zFKoZ?9n44)FY{V8VZCK#wG(^Y34_oYV)Bp2dazHMC zmwjD+B@_#8ye{v%BnMtIs*fB{o<(R%(n`F~|>e`dkm^9sP&|sJxCA$L%;c9`rpeTu3)4A;H0328?16mw7A38Gaf%OTY zzN=`uAlKtGdB_Ohc5vxhOxU%&)g95)bqCD9TpT7SNV?-Nf;j68>oS^d@Lt6I_32?o z+>W`5PB{-Jka8YuW(T6-hD&@JdfPAn;Xo<3ATxt|?i$fG_AUKg%cCYHeEKs0$TBA!bg5 zTxI!$T^{KA4<-Rx@Cc+rgI&I&i7;EiLoxBaKVbBLuKG{s(=Dzp%SmXnd{~?!K*_`6 z^n4P!Y65c*kMZRpu|j0!UM%ur9*3aP>L0&-50K`J4FEj^Cor|s!NTw)L|ReC^ITj) z{)&*RzJsL5;1d2~P`8?L^-{qf5;dVh?d{Hituudlrjw8fy2Ja%rzC^IXa`IQjMnLC z?puunDH6T5{LlV2+v~Ca;VonX=K1g-uc_uq3 zFscJy8-ywFN3oiOVU!S&^cYPR9sTL*Z-CqlJn~J1goCzvglh)^A^a3)g*qY+1}i8k z9I}+5tDb{Z@v~TOfXq)k{0)nkGj!OUg9RG+f~Bb>g(ArnWnDNR70Auhqjhape1G6l z7N?;&SV^>+VZ#ng$s^Jo(dd(l^*~ST;TR3)Xpnr!NcaQMEzn%eq(>XtAMFI?m 
zp+&2Zlv6wSG3fg4#ZWEbFqQqQ-?uq`=nZSRi$_@`Mc~mw<#*j7B<*fG58E3UnCjrj z@Qi8<@36cT$-=Vn6^$$$@4!lQE{!{4#_{y{wa-$>vqQ=XpnjW&sesN>;A&oILi55M zpP(+WUkL5@usrJ}?pkr6U?m|3tBtveZqD@x&08I^zMmo-hIoYTHteQr;mRX7TL5#$ z;t+;@(#7r`ZHW~9s)QfcnE68N@fRni?p3Ng0s`TP=p-? zhq=TkRwV;|gOLloM^PIs_a8rd3}&S|uMK72I@Cie^!9N=+7VFXV?=H@d(yX#%lcd5^QADzN9IY>f|lct&V&w2!JNUB|=Nb@P0zR>>kDGW52a@NQpX3 z-b@Drt}v(EgR*>_M9BJxKZ(u}sY@MhgL_)Y9B^IN-ZV|`l5^=C!ENBe({kz^NX#pc zWhW1(52?+PP`lJxe<3*<)sV#lR!B-*VZ)#iKmL7*P_0p2_}2&;Sth$2Pfo-T6kznx zIG}i70!pWd4VK>y&&NWY;(!Q0g7KPzxAXUezId9Jl4o0jQ`&4+<{GRpOxl5CSW$#w(oXta>v43d8=CyyfO8Kfb2lL(GDmAi4~wye_? zYy;1ZRxJHfY4EkiQOi~>^onX%2Uqx$@aZ^i*ufe{I!rt`+CM))%%cwi-nG0E>BpCX z6$^Ihvfe8eW`{H6PMIMG=G50|$KXMbK?IZNaZ2^cNu_x75JlKSG|dw-5;hlKC*-{B zn}oX!{C?e?o)5JwIM~a3+>~#L6EYLw%SC0i_ynMd;Iz()%9CNmF0uSEe~pq8C&_`c zif5RvIC3R<(3PQRrZ{|Kpu^?A@k>1Yxjp>xvx_xEhh&3(6~d!g2g$d9Fm_i-^nvnM zgz`JoghJBP1_J(NE2-?ifP@HyAz+wI_;*TsXG7EEi- z`W4~Di;t&f?|M8uH9aE95Tfy4KHEHdK$vm2g6X%%ZPT}$x12mCyj5z&JBSqEg#jiM z25eZ|>oKO>aINTJRDs4nF0sQf{m;$A^ATY9!Npcv$#-0#N+7x13d#{EiW>IOALvsM zut5)h+C700ibs$d!p&X!ZcLWphltlP*{jeIamO*9&L&K4HVsD8PLpYAWMziRY9BLu z1VZ|Po_m5SN1xKzw0`p|vWwx%RbV#jV3D*z&rOGnB*c}OqlcLb@^W#>y6v(wtY#Op zbA(=3hdE3Yj$sZeh!YdU?rMZ>oubmCqO@OdpqtZe!3cJmiJUR(4 z@H04&K5v|-{^92qShnxe}Rf+VPcWD^~SF1sB%Lv7(EOcS?8!(u%H1i+YEnxoV ztEk`W@KsCb>*$L|K@o%%#jykI)hVggZYQ> z%HlCiKoYYlPmXWv5UVC#S$1f+i*Wk&gnY$9jwMYt)%f-wdsul2?Cmb}6i_4pU&WBh zlyjZpF9e5mt|>@-OAelcOV5<%*>>PrOaABjiyi~PXv&G8Dt5d}@+(44!Evr9;kmw{ zd>)6>aL>}|0(7&T7mva~Ru-&~5YIPv2WHeaR-#|dQ~a~9Fs{`B97NQ+?7|#Co7aUy ztLs}f>}%yQ^hMZ)13v2%DQ;3Pzuv1)!qy6j8?pydos?XMze_w^;msa(YElXipt-L{ z{kRQuI3Dz__D=|(e})N#F+0$sV6jNI2@hyA&eyw*GFz35+4XwKcl>lF57Ao^9;ALQ z9N;8^2ayYBS;#NKc?~YhM^^z2<1LB@f3+(4U9iCjJRu##V|%oZXRN^SAmD-<#zRTE z;n8=VJ+4UU9Sj?p!e3yz>#Oy@i%`GS&kg6LRfB9}j|pLd61}O`WG*ZjY=q%EHbOQJ zB|Fer2Z=81iS1_rXi7Vy?rn(>35VOVTPqfq0ntD;?=c{1A9y>j`%e?&vjf-0q+FXF zXwW=B6}KelE~Ka+YJEDUdfA<#&^yPotQpv_=c0oeY%psg!tQ#@NmzLeg9VZU z%;=KZ)9oxx*jgUav;J_@XTQnE$a{t$pz-)tlHyz6`?|1LvrF@5QX`=i>z}+j*ed-a 
z2ANXMB|Hv&bQtNvoF^C44WZ@63JE|S1O(&*;88+uSxDMF7zaT~C3B3Gv|C*eb9T{# zoABB!{r=UVJB|_IR=7n_4j4^Jmtbbv5Y zaLLJGOtb+Ju7iQcpy~18fhbwtFk{#O9fujx^y`0iqkKm*eh2C2fRpP((==tnkRO3Zl55e?INyEpJ&#fL z52_`8{Lf>mBviNNy{cqCA}6|1J@#CG$0zLdKmt9QKam#_Ju2d2S47yrq50~FSV^G9c-^E z5JV*8bs(*5>;D6M;RZWB7fCmEf^aOLg_5y@%?W6h4%N>=UF#h{7L3=5cW3=JJIpGA z(j+5momzJ|_H|HWp0O8H`lvj2_!`Q=(OFyNUc>!#w3@cjUUe+s7Z93aC3)~swCP+8 z#f14d9^qD9cq@hv|2f2Hn9f z7SVu|#(QMVP=n~OgjS%Q;D7!AxN#7F0QKG>^>K*M+y|u`PajB^D@!?4fk96mX4(kJ zcOneH>z5fgNDO7!r^#%!S4C?q#jEU+;rff zLb-05L-h$w6{hOFNrJUuhdZpx79Znno$vPXcrb)_--LZ>&g`PV*Xa-u6L0n)HwlLo zEA-#a9%>U)%1utnqwWa}TD*e+SKNoX{561~wWN|g+|m+^>Gl!EN_;m^ggN-)HP}`NneXuAn_Y@L6fHD6b=6t=SG121g|eE0 zY|K1nz>sH|y&@8$G??_je}v!<92hPh|p?oNU8tj`Ia0`LWE7SN8w}NB`EN`t*h$d zRp@`vj#=TXe#OVELq+*L_BVqEHTBJuJ(@h+8->)kpjJXv4XihO5cC}-i1Gx_Z6L>U z7CY||LRYhk#k@2mZHHN<9%g&m*q5xGe(!Kgl;pA{^0Qu|3Tce464ke(wo^xWowJVs ztaP}tAHl|Mk9PoZqPOb6~*%29^VU zf@Nps&^4G)t#yEuQiSsB9pe?Z0U^;r{r~kOyylJr2^>c*CT+U6vnI$keQyh1iig}J zV!4SAl?5?bJcfPBN#99FUS1*T(E<4m;8rWM$MP|$NzVr7h#uVcB(CbO;-MlH4`GTr z&meDtz`mW7M}2n|j(@%b&w7VR`hgbrcY&Eti=a$Fykl2*Y=4$P30bh~s)t zO#3;`*+Xd`k{j^?mbrSibn zfJ$I5XV`Uc?)k9ZP-nuI0j=Bv)!yuKY9!^UfnfRu!uSv!5LA^sT#QWU9J*tg@9e`w zK+~&`ywC0`96aEOw3ocERp2HyPP*#XOlfL}CVmb{m7qxGi@s*4dtUdSdZIW4v>|p> z<5#&x^~o44GGxhb7qwm#Udg(+bw&v_gOX+F~KZ8kZwYEAeLQ zz^c!K&xd#{)h98*ja3|Pl&t4MK75C%yf+QI6Wt@sbOheRseaT# zWrsS?Cz>h#g6(19fBs2!Izxvkmx2Q90Dg_`=<4EsJ`CHW&j(~qZo#JHuPnDuD^onxd)hNwij5k}86ps=s=}N3YP`Y>Lw1P}|df$!g zB_l8)fMgAQwU8H)CXGtLLo27!m-u+uzk z#(QOqu*&mvne2~edh{WOhwEscD>=f z;6#E&4zM1B9n(&B5dPs!#K-8Jiyjp=X7pA4HVpxRgMR!oWQ&mbqpABBWOEo7>TswX z6~TOk;pJY_Vf4jrOC6tpSCncnt+nnt2B~19C9qlZV_%2Aw$l|08 za!fbR4EW~@83^fS!8H9beCadkHroJxExA$o(_Kwtk`^gXQlVw~l}0u>tW(bkdy-PJ z(~Uaa>~i5G-Gu`=g8Z*{}m2T4y#P@WJ=&v0D7q2>w{Y}DI8ces9^bYozpadWl%y6dN$Sj< zv`Ub4y8&5=9vzlM29(6U`$@|8PB*)Z2aoH%=$`LuF_LZ;muHtjM$&Dyfd>Wu^Zml0 z(#;7b9`#(3UB{jxlXQD-N%4R=l_)2Ub@AC*(w8qRwfy7=NvU2;_hly}t)e*HZb8h? 
zblen;>E?JCpN7D+4Xc;`e7adT{IWm%@g@9%Lq%A~=VLi($v4b@2LIH6qBbf{2Ty-Az+jW)J4{mkz#vfn?*@E}(Zr1z#m zuNqUW8mtS;k$dS+kW5yq!zWF$Q{gG0jP&s>IePPmxx>Er%W>SVuSzW+{^Q>gGte8Z z{%7T}wnHBzmqQaQQ2Hb5u3R{XWS8f_6F72bF#_Szh*bTF^D^oV%%S&G41VRv&~iPb zp{nB(9~zBRxi5`_X=1R6`}MEk8%UhA#o^_2wONr|M=<4ip)0kn(v?1sA0YBzbPptJ zfp%=8&b@QU->!w-MgYKFQdnY1xyIwa#9xSjP@Lv}15nt?WCR>fiESEXJ8UZt*k&rQ zu*O3l8w+Gv{t_PmQ-uGd!Y}$P?LY_2!-f9wx-$92`ZL;K4pj&MQ~4@nyL8ZmgyeI` zLT|8O>`#YsDf+fOiGYO_cpj(Xu#-%+lkl*Yt?++*i7#KnDT7f?Ox41r8p-qVzjUo; zx(0W|x1vwvugXIWN&aK0?1}lRIx>fmK>Sz&XBtKRz%&_d2bH~KseJx3J;5pXCmj98 z)p)n%oY|Ih$2Vc2FRPR2l6`{x4|zz!aV1-Ol(X_WS#gsyB}p11V$j2IJ*dQ6`@BdRBPnFH4XTj+K+flUzjK@%|KqSt8OQa#O)N7I4~ zPFpb;&X)PtRgd)HKFgl5r}+esBSUen?+3pf{$A|AexnN=D8#&!v6=1s_dy~x4}@1; zdlDZ2+dJ_`THhDW*n_@rbyoRuM^)@>aA-nTY6&&UiQTG@kT@+Q2L+g5U7qbw!gkSU zOEgrJR7g<^Q37M^<%4?zUp=X2+Is?e`$$hv6PTce%67{MWo=)MNwzu|MjSPGI(d!c zaUyBS^l}A95J@HoS_~ftmgR*X5V)Djeg@KqQh|^8Vuh}5MaAv4{?wJ9m;z5XUb)|Q z<%RF*`c6CKnYuw1F^RLi0h~H`bC4-H8-3kdlFnFl!xXHICqefuUA83BN%)~vIMKfF zFG)6;z;0+Rz)4GKVmsf7j{`tgo5+9ump z`yhVC;X-^>p`6Itn}09Jg_x~D_JP4BFKl|ioFiU5(kHO;sBBfive=@B<&O*;L3yQ} z32Fc6kl}}wn1{vAX&TzDBTZf^D4EQ(F%RpVkfin%Kov`W1?Yg-kVx|Serr*Zkb}`+ ztEz;pc2rD}*Wm$)^;0K#I00T*>`iXzJ|lG$*F_TuA25B&72tW>4zJRd{SivX6MbF) z|C{9T2b`51DS*#&${ze1p$G~4Z}A!lWm5rL7M^4biV2n|mzX@CPoPvPdDfPMW*cqb z-5(tD0)3N_efNH8$o964>Z>FAj{NbAFUXTt#$>ue7Fymqd{%TcvL6;&eX_8{Mp+Gi zq8va8M9G8kbyBss1Cun%JdS2EAPu@O)0Py#<*=c;F?-D1IxG_M#o$mZUVaPM1y&Yb zQjq$SfoZv%!^_|5-WYjct*U^vtJMgA7N!sK;hC;3@70TbN6RbO7iq;n7&gha8# zC*%qlDlae-4s@~5iEW&bQ#)JH9tBF2O3eDFwX1XjW~I}_$Ya8iR0EbBq4B(9S7v@}>rgcH1j zvx6_4ibj=XEUM#YS7m!5>Q0VB7{;8Li#Mg|Tu6cPH_&Roe&_FWzeeHAdB@nAtLOSC z{Er`>L;H=&HqtsLTPlg~6F9I41#TPbRkpKs7(}^vT(4C~)oCvWO+;t9wm}2{nz#nM z?!uG{Yj+2uLOj%!-D}*H!1;DUC*1-4_AgZ3zi=GOvNQQC^e*3UNtGQ$kEkjkNB2Lz zlx9B|kZ-oH|6pHb>Cq?#A^&5~C!rt)4$V9`l>w5lr{py$ee4%0!9y)tWopqUa36$n z*#sWOv#&ZwysC><$zM;xD4w&f$XbCWiN4}th7fH(19N`J)v_Uz6?Q~_T;|FSOQ)t< z2Z0;>L=jq;@!Ch|S;W`0G7sks0cv@6fbd}yIWH(M;Y1E3SDM2)5Oj{s+dy$jw{XZ% 
zFF4$KLZ@t8@X;ppCwbS9!DOk_y3|spYtO(ohp4nEyvU#%GCc7m!6hJm|4{ zLN+ZmPdqEN^(e1GQ@NAO`4@njpc!EjQoIIgS$|eJM z`DL9uqK=nCxvQ5g>BE0Ky+FtpYQ0L72{h2?Kl7nbsVaFFXGDykSL=qoxoFuCLW+(H ztFWs570Nb~!%!EgOxmB2Y4%QI2q`DKPsnbNx0?~NICmJeG}DS-AvXnPD0oiOo8+KJ z?Vc8M(YnLA)_dm!zN8SkpnM0FoP(qXlAUHgJoH`*>g}Ur&Z5J;*7am#Z?ywzs#3P7 z>FhEwC_(Fl0YA)P3=3)HGz;Wj{G`J~H^CH#;vpMOLfP;jvzj%QmPS-xKY&R;z;^&> z@t47hcQJcNocqr6l@`yTqthWI&y?*9WT^7doF`doi7ux4#n3AuV-Fi(n1sf1tu?~5 zS6)2PvQfI|b99K33PO7bIR0iQM^7kEyQU>g4nANHFt5tPe=-WOs~t&)cvBZBd&&;z zhRH->TSgJg_|e~f{Q9XAE}M3EBn;0}yY zLs7?}MC14W*r!j2k^=dHX2`j!mHhqK>5p9aGbUODGlz31C=a5cZTrHm9vCK@LS%nF zhELKB@X6?`h=2CS88LC+;S9PN%aL72FwttBxTix`F)NFWWzM6GhRvG)>nmq|$?6vD zj7()JTe#q~fm-;x07gGv4kVA;2Bo{v7Y?+mEK_wju#51(UT0*YY>B>&p;R*&yOlA4 zJ7&Hw5Vi#aM|yVcJRu1#UxuPWA~@?G;#W39nfuJ*KuBuDmgFd0CQNK^ zhk2%vEY+^afD#xOj4&4Zvh6cv3;n|Z355RPBj@ZT@{kp{$y0X0topGIG-P5z#N@i} z!4&^k`TPmG!;ZQx{Kadki+89KBp2w&>?k{IuPXcXm{v-@dBn^tGk;y7b#LZUHz1D% z^4X3-R-XWcySBkR1OQov(~{$fu5O_TtVe;{&Y(qh1dYrKFJFK= zgdRfwptuw$?6eMWR8yWE;8P{wj|?vtdXAZ7gG`hJGdo~{uQE{Z8utzEmXsa(^`TD< z`B%U&+l3vYft$Tflku>oknMo-DeFxE3FARg@2`l#1G(-pmP(?lLIjJGCBwiNde)G7J05PpbM1a(2mQ!(gsU?Gds==?x=T=GpBu zYYkUK0AXbBtNy$CFBx=U>mEoiuSQ-l10VDy(Z$1#F$dRMY{i;2Up4|%d6&REA)DFJYEl`42tQQ=@G`7v1}*vAG_*4 z_70)+LS72PIPfLRHFzA`MaEZZlb$M^P-VCqSl&R}%(A__W+`PG+HdeQu?;48u`eD9 zrxH8VG!Yf#D6#-?jy@}qi6Xle7a{ESSAS%&)P)AEkY>JC5-)*5rn2zle(3X4YH}zT zbc2vX)zUu(NLkB{G&B4ec641y*mZ44mXjT16*9!Cb=Fm>gTsc_JeIwK9wN|Ca1>Hp z_W=q|PQZ7}FTND@?2*S~o zN4$-B`!y9mX@5c{21{`$eYzJ|UF`D-_Md5W`=w`%{D=w~rY$_TT*WM3M=sU;&{Q@u z-H}fMISTj=;O!U#&#gs*F_v9B3WjZ`4|y^?4#MQ-%bt2?QCI1HTGdT&11sP{V*w8* zl>*`+oK!TNkwvK|1^=M!nM{QaqXWr>2_8MuY(`-4PZ!dMdLYTBgIA46FCK;_LIGI? 
zd#Mb}FxjK|fWgp=*##9((!%p7+yH^pvvk`K9gvr+VwbL1cmTf0aNr${Flnha$}(e@ z=~V>*BoK+7Ohn+CrjpX&rNA+I@u)0ivT{`9@cXH|ooMpD>Dc?Q?j!k-t(6jWQo}%M z9!pyhsL$T0g9vke&R0>D6ltowHA7}9{^s)zdCf_qN4%wd| zk_mZat-*>_Ni;!Y&sYPBX5BR)QWEJ`ktTuj$bMFve1$Hmpcv znVpZ&gL00>AKF4uDdHQ`j8~!Y$8{)Cp)*tU&?q5-B)fFBfwqBuKECj2GDbl$NSU`j z!>J?=moOF#g*jQ^&13GU??gpcp3-dZ{!0!QTW8ekb*p{V3F=XZyN?|siachQ?uKS; zd;^Q7Ff&gwJn1wp3GsMYWCA3KSNo7hGxH8SrnBJLVIEd-up3M&cI_6GR)#Q{lfG_{ zTF;?n9WWz5-ZL!lG)m?qrR~T0WsoOv5Z&OT?YlUPw~KDt zfrY2EY_^->8~7<%z}Y0zKl5k4o;v}PYMQ8$bVQZQ((gLr2M4`4L@n8SQ72g5A?=*~ z6)HCOfq9SvnX=(FRcIsuO*rYOm6L23umf6OsVwDCZ^MsIt!W!@Ii9@ggNJgL?+7U=MP3>yA|H z*SFUrd^=47*F}Sj4~3aFuJy=wK&Nrgv!4;cj|EW5`J@}^J%V3Odoca@5^Mn4+LA6i zpz@AJD)+qs-M+>6EFK{&Nd`4vk&UWDHV#^)j8%DrIvWgib{jGreML|<7BHri&V*_U zshex-kd)Ph2r^&Ob}h*gFWIg-&Gz{j9}l$dzkjsNbQF7k98deS_2Xj#i|HW_ibuo? zbn$WKqoc(`(`At#2w&7fhP8)nTYi9kQ~Xi6=wbzND;ij-RwE`P9Wmi44}cyX#<&XR z`y06w4{1pfjYMATq?$P%*_uVRZ?6*Mq?I7=(1e9`ylf)`L=N|uz3lj6cc=tWumF9H zCAhU_hk0OB8FhS5ND-E-?fq54wt5@%+tT~JDTyHgrfH2ve(`8 z8)z+DQvg=TT;hxM9aDi_h?|dymc}+>E2h~yh3=;87BRr}O6# z0D&$AS9G}cAx6LO`V5^qV;$I<@c%+XvidL77VTE5)x;R(>K8e32SWg_r@0Quh&*TPIYN3)n&bc01?kQ)`l(q_{g4C z+V|%3(SyF+^KCCQw<*c!^zgu_4#Pv$?C>FUm!&NZK4U^8b_Z^#>}}Q{mZ*b9*MT{E zO8LW)z<&lIl7Kotmxc*i)fJwFSY<54mk9B7zT;MgqBJ6w(u({qlzM)-czG*Y8JB(s z1WnpE17F~D2Ru*|Iz*~t*Vb(Ox`}Av#P|QPrvutyg@$0k2P4_^rBgyq-;(1URC*vH-ID`!L3C7I;8gHUgJ zeA*RXn=Q18WQU|p+M%KKcU9TnwcVuJs_kC9&x9)j;E=ZQN9&#sD#Mo6EZn?7Ql&?H ztxCeP?^Xu#M!Gkt|a-L&LPngYsSJl#8t_ zT0kzqC${5e$sn)c9%*wpwTcc9f!@jv{M#y=2Cg8ljn<#W{?W`z*dj*E`F*8CO$zcP zPJFQ$)zgIjD1rN|zXgCRgfctKrcF4U-Y|8}MKZo;70SOs*oeFeAHbJwlq6b3{QU6z zQxUNxhkiVoQbZ4HIgjpsybTiO!sm|OT(gKXQI2`g(|a}{OK@;zv3u21P&Vly zd_zZs1B8UFHu$VKVb;N4@H@A5&W+Z#!K*iWC=jEPyxTgYSCx6_b^c-Z$tOLW`4e*H zuMPr~BokCNi=;jZ|M@fAZWVFea&Ou%XrOCO{PPh`%l-p9FdJC%M^AO?9|ER?*(&wu zb&!vl>cV|xOrEGWSlrmnHVktJQ1hoS3}8hWlA&ubq8)L zB{WjVKmR!#dgx+fkG&OrYx+h2D&@A*9EytWJcBPO1`$;3K0vCbg}=+H@-9PRYi;?{ z`wQ$Y@})zfc6^_NT?+kYi2jNd{uMdKoPMQwwOdVgb9AWM#a)50C%{shwwQj2;Apq& 
z{OD@{rggF1A2PbCB&ITOesdWybrD5&y3~H;C|6?}nLg9h(LXg*G90A5nUK0tBN|R$O;oW}@Kx{w&>jerq9ldf#A(bqJOqQa>MkFdyqWfSdiWs zN|hlQ&^4OxlaUO@r%OPe+GQm7s)BGX zs}v;m%k1>>`9J&1?>*l# z@bJ%=p!c7>&1c*i=X8?`PXAv*&C)0CO7wd0ZB{1cXAhHu;`|Ah+5Ynfim6bkuZxg5 z?v?l1csWz61da+nV`PNhb$?Q2>HUA?c13}VC9`WiiIzGv$&Qiwtb??XTIog8Z`r9e zb9_zERa2jH$jlD)voIKX_kdfza8Lep+N_@tfz@ z-D2_k=zc@yZM5tw02LK#RS!g3+{>O3%Wa;xLv}_6M>AgpnW_ldh7NJu0tc^!dgE84 zeb}E8Plrs`FdMm4Otpcqm)ca9@+l<+E>vJH6kDr6u1;a=peibZhM^1-+rNQl&+HWh zJGe?VE1-bO>|MdxTMs@b8ozR%vqcB1iZ@`tHq}5jSBL}{3fqdS8gW=s1QNkP6eVhX zKV`&Bol8z!Z!%1(p)OE3E1=HtM?-ebiMxJIl0&ww6KLM%hV%`hVlK7fCs}Q@k20b1PdM^(!M9$}OssU>mTn$vU?yx(ehg`SRNz-(I0+m0O8V31t6-$aSJK5YnZ2 zp4o+1l|?$#4kZ?nGZ7>Q{nrwGQP+DMz(_0~3p2FuLwvNE zdn2kcU=TTy$hIzt`Z{EH!@Ut4w-DGKI>08{tK4l&4)9k{gs$h`0<0W^k?1$dSy+~N z{x_|W#b&FpBLJ_6>j|(mMj*Sqqyg`-PKxByqVN^+xm812f>Z!@mX1`0k=&c8ES+ne zW^(&)(3LfDx3jR5K~=@V@-C8UsT@E$*zpVbH3$BlbD_tWRWro|iZj~l|?i(UbrkKbzr0TDs`!XxT zILpAcQ4$oz;lU9#ndH|(xP-(_ue}W7k{LVhH8k!ueg}2#ju_d2Z1>SL-cUsld;WUT zO&u`&NH#D>DI+Ol26i$Op7HzlbzpO~q{ym9MEw_nk0LoLg#+Z7UMH`rg#za>iENNm z?GetMBCliE?UH>_x5+IW?$l*i`YL@c_9W(6 zBPIeAQO+zm$Q*>-kOeI?>gR8VAmVi&P!YK4mzf>dix%M%n23flmC9A&f6MgJxDV&Y zI>?>;(ogWDTGVP;NE6wyWQ{a+;Pg!M)6kysqo^JKluWIEmQ|%7gNz({BY2J_dG^)U z3p?ViDw_1)prXl8#fB-Uh7E(9Eb0V!G~k+)bGRuJ4c;Y)JryFWusN%kFUV{>1zG?r zBTORzjRgNz_;4C1#wV`t6zL3WGj0+FM^vG(kJdrbRYevATPYSEcJ0kXNLG{NSb0KV zS97b2q0Y4S*hGVi04gVQGxyAG3P9UdZw{!$@1qusC% z65{O-7jtEaZz>JX@ur1_00BZ+o7fMrB_8lpyk zwjWJOy;Bs*on-9`#Ca3V#ZDq5_A5MWNcSX|9&uPRxWT+96~_w+8puf=!Uaj;I!rg@ zU8;4+RFFYaIJP>bM6z}skSRtqBb)a2^EE$yg^7$b3rlDihP{&_y^n!82oPANf~ia>zV@{dymg$&IR2=MHC1u*1F!dRMA?xl%0p>cR6@%Y35+u9 zliZuiUCIvf$s&mF+N7m>kd{{%zeEN-Ks!bbziq`=w<}pns)-K(GZhAPl*1J<^?45# zqy%7%FqJ--cp{x>e?wJjBrbf}EEWW+WEKupMQh=t-I?v_i)^3;g!w#N@D1&Kmu|bA zjS;dVSOhmI{V!D-gEdPdjSxKS37}&K1~%YGCz%G8zVun33T10WFL>3gQm5*qIymfY zyp8_+&yQb{xh<7>G`ulBarvW1LqWk})EhZTelrYAS|age=z^3#v99rp?p~hUYaYnD zsfQ23#tQN10U4xH%NiRZf2NM;KYYNcT9$kprwXh`7$7qxYN^BU@07Wtu#Xuf@_kg% 
z3)%7m6rg76WkY~&CLi5BS7z!X;?dN6Bi4fcga`O~0=E`Vnwt~*#^>on0Mk;al?g2` z3$>vzBvR(tn75Cn*W4>PX)YyP>9Y%_yHOoi%dwj@dkN>6rAj>-%L=(S0=X-htYI`2 z%u9S=EU)lK?CPGWZ;16~F$DxZk=EUI_iu)CLjICs~Dztv-?6f z?EzK6eBa*64#2F>nM6mE1|2>6gZ_IVUr2w(%{=)N#vCcy)V`_%bbpThi{o<0y#J5< z#3K9~3p{7??o&a@tw#V@GWn~M=0_iD?%p8~NTvar-EqCgfv1~jIyDG=pSik4R<^YD z7fsZo;tX7J5P;Zd9KRz&AeLK$V>9;Fq_VekYm`8jMD~uV))9*K;zDnINW1j-sQ8;Y z{0l(D>alFrLDn`~{X)wq>e{$6-xDZ(L(^vd^B)TU7a7)x{BMTX&Edb!9awoY+T;2xF z7&Wv+@)enA);-ARDZH%%n2Le+mTmF)jsUCO|Diszp8acUQw<(Oa#sY)i} zDv4Gl3=d+t8d+sjwBqZ6rfOClQ&Y3{j)@uyK4I&Le(pSLvm(>w?T`yjf_x9qgoEV{ zs;Gm5NXX^mn2sEkY9P7xCx`|L$7E4BSk#pJqr?}QIJhH18CT3*V;WV1$}3}2#?~I_ za3-1|0dKhSp4G)#JE`muP>!%8pYr&3g58C>iN(jf|B{Y6yMb=Cu#j$gRkCu?mo<9! zeV^6|W<)viV;V-N+|ThY78&{Bq%&#iS}C#ZU(o3?zx7v-3xjJzelUepHv$V^+Dk!X z{yGSdW+_yCjXi&bM3dbiFa?TxCpamL_fpCaLTv8=;}L;9v0hcaawnh)WN(UBiCTI` z^g~wNiK18%&EtbnbtH>#&nnbv#bf{;e+(uxOF&{M&ZH8mbqhOChfM}Q=LILN-*{<7 z?vk`JJ*r&r^MK3l_kvJBlLMb`7^LE&_#DJ~%&b)i5qOKAY_hU+2iByHio89`xI%|N zL+{Ne+8#$(pq`;_i8pVp4qQ{~L`it?0CE3lVgB^9{IQuj~^2^5d!q!&ke5LN=!K5q9nO-72%r3GmmTE(e{G*I)9%Q}4qYERpcXW3*JKq!zg47cG zY_wF!hogFyElp&F$%^Y9@;)0VK*&dHp1i^0fjkEXStn!z+wTNRn(5xV≤Y2}FS= z7Gi?J5_0okbJs++I;cM`BxsOSr~wKD{Pf(QvoF8=_Q$u^e!CMnoMyw-H;G9M(ly!+ zU2Z3T+RZT%F_Q(k=?ZdW>c?Hpi>T6qzU7fUxr*-O!tnC38fhF0@S!5v$U?yy3#j&f zb>vH1G*nG2v{%*B8r`sFeQ5-Q6pefM`~K#^^pRnvSd zMJwcS1)t5KeK20kxF^GAxsZskB^e@o5|t|LOA6;#NN357I1x7}vNJb?XT|Bme`Jf* z)!$XNXBKr>`wS)U3poo)D4WZz+}isWGrOHI|B#==X};>)q=HS(>zy!FIw7x`dIhJV zQ2;2WlyhVUG$H4#sr($V2f3s4qS<9`;tlND6uk%#w1 zK*$E?lgR;CjT&TihY%%yA$I{B_LZC@2kjsCoATtvOY!X&6jSGuNtY&kv}uEcFu-Xq8%+yrRAT<57G0IXQd(XZK(D9I5w>9OF`1KF6f%Z`YB`h%j=bRwbsODyy8M%_QAfsq*p|o;$0rl=jmR(`Wmov0ODS@qxD1|rd zkSKTvp1G2Oz2z;SB^jA!Abjhe07m_2m~9%l_JVV_z&)L$p*yw<4ATzWaacvCAsWnF z%5o!1a%y@4YAfu{IA^BiWN|p~LU34Jp2)fxUTsz_%z=9EE=V(KDM-0nzeG$Rm@Qm> zc%C`L#fkcXZcL6w+CKvROi$mySspS87NJ;}NWK!zv=(0Vq)iNj5+JvhxrWXLZhNXC z9|UbOxAGa4Y?nTn*!G5K-80wzv_0QFQ7$bl)hin~#w&9Uq6o2xL*TNch27vdr6lI2 
zAH+K{fafqIZl;R_Q9CnQL%!cGZoJKK~d^NNP19e_e7KgC@BiXGGW7h!etyM$6nrhXy{1+I9 zPm4|DJAnWv^IFbZdID9?WXygXMbJ;wZeIt94k;D}X4AHRj}bL!#^Tjv8U-v(z-WfK zF|YMEa7*S&@D^EDDPD2^TY|*VN-gmQ_ErVh%O=m@>kK8OEjK2mfisu&A%klrY1T>` zvL$3npOVk(*#|Rs?lN_DD1f)N=E`<3JRTJPCMy2Dtzh9q$TSn76%QjxmdtSo^)A4` z&4E$D!3~1T`J_2>SiU!I6uX56IkQ2LfFyhMpgMu}xzJ1R=kZSLiBe^70xhG37hV;S zN;br_;|8?*AT^y_fyXjQg06qVv~Lm_+7cdja1rRbQV{iz}11d&w|%{nO_d|dE|DMikHSPQbjRcY|9q}XZK zf1u*acwr%D2Y-zI*}9TN3%`pn5iDrBWvg zfU&6$c_lK!4C|O8M;~KcqwSj%|$<3+#xDgN32rWCqNd$Yk2lV-xSLAWX})Ho02 ztrY4b9zPq2esUyrmd_d)#L$S5`Prnk5x#Z0!Bd<_1`7x|<{Nz1d7-q`bed5AXD4X7 z8o0i-$`rmZA(tW^&>4APXa^|sQ(9PO1rs?dw4bdYbYi1o64>Apgy7&+GPfs9a1f>e zAPwUfG=M*y9XDJu~|ia23WLP11X!Let6y?3L*(kF}X(2b*+Er zEWtRJge0;_7|1eH{YV(5RlG=|z^MqJsESe}{`gW}zJ^mk&Q~SYxLD0|NLESXt0d#G zKJ})?3)S2E@v(2vNJH$zWf0>1qaCn9ji9DSgYt> z-4KC{q%>X4e1Nc!S95Z46j1%t2Xr~bG7Q`%Ud3+#oRP+QCY@!PzZX+kL9Ealky9yO zJ5m!#L?x4^J)&Oh~8smgz`2l;Nj5^`YSX83={vNlChGCue!Izj{0x2E9lZaO ziW~HHC2Jr{hhQF95mwn8A%8tdgye15)TIGt^5#sUt8>N7c2fPm0?G~N5WnGboM|uL z;1v_%D{s(nQMItRu<$g+KonJS@th-6L=|qQ_L0hoeIKgbw3mT&fkRkD(zdN{-AQ%H ze{WOeE=43Z>UCf4Ad#KPmJmFdSrJsyr8dAC@gt{dirl{>GIoBzSCW^XQlk@^h)(!q zXsk7}g_kJRp}qQOB5RLxUyR1tULVHsU_)_S1yo)*c*Z04H^)n!VSFJ?XhkR#>*Ixt zf=J%hj1QMoNDm^31<^j>VX@1B#!`HkbL0;CH!|IPF6aO`{H< zeN*}D!}>gNj=`GTqQ6yDNu{dVz`4tjdf-h&yP$M+^od)1CdxqG2QFFExsp$!(1eDp zcE!7sk^N86^gk`A5H%@@93ehsR0m!2$^RwYk@gu#Fi3f3rM~X=@+V9rukvk-+jUC5 zd;(5vdQt%j=y0K`#WR-m8J%%y8d}SS<4wp^E$lf(UiMXT+3(onnE?}8N1^G)uYG6F zKH8z{Oq#B9L%*A~yU#dX(QT}HNbrhA>3lxP1R#yQj`n^im#gAkKgVuYBvDwnUJ*}k=I`QSA##ww(eEEElTbtTsHRg)@+ z;bh7`5IQwdZVk>nj+IrBSW-nn!y#c{1zrVB*gyP7c`{3m!6!%w zo-=741I(hRo!^d>C`&3mX@&$(84^k2GKWfX;8EGlfz?+uxvE4#$HV}(4DTDNnf^mW zf5wk5%V-EZr99{QkI@Sm;izFe!}&WMW`y$RX%hok!Z0#WqQgOs$T&q#e5>oaLne+Z zc#v(YhHSeIZ1DqI$iPdl8uv9f3^td@q0K6=dB+gN!rGG~0hOOg*b1fVG#~j9!f~V= z2}#TsE1U=6v#Lwq2FbZpg?-H&FN!ox=zUuvX?SC@pw6{Vb@H5;4gQFBdnBJU^NbJV zh5HmLSw+LWf=Y9S{Mk6rA7y$`weQgj2b0mwGYmLJ08d#FWc`3=;rQ=D~VtMH}qs~7In~jl*=&D@SS(8jw3}CXABV1J!2IV 
zM$MHu6rss>r+P73)Du;zo@?x5(Ij4j8`)w?G_IBqtj0>?;IxvXJa#V#itzYiCymF> zG1n<~P~L3NwSu7!{f0EoEW6^six5LL8*5dMEp`hxSQE|FHWY!DXRJd2`1!`(*&df&r@tR2mk)ZnLqX|sx!wPNjmZfUN(F-`_VuP<_b7}h0CT) zmrfShVtC~w&V79X>3#4WTpILE*Rt4DNo^pDfopFli-kLKK~2ZiqQEVU21Bma ztTLydH*~9t0?qEElsjG$LeNPATh#8d8na_bereKh_shC54i$XE2P7vjD)ur$-*59@G) z_^O%2w{?hA{cRzpTzT&JHb`j9^9LK%g(XM@9YG3tPI2m}hEk!A!L?r|^WKN~L)7Vo zdOMPgwqdyuIqyE+iNy9OW52`5asR67b)N^!9P?m`h=?NJ6DrwbsdGpZ|8}IW#&p(U z{l<|qYD1BIY+gFj7DSH7VJaTS3mkPv#&}~Ja0@agk$lAI^dD{D18E&>D(_%CI*uID z1zbMKxtB>QioH(&=DLajiH3Fb#d6&|lh(L8XWX{5lgt(yacC%>i{wPP%4fidGLbQ$ zuv(6k{IUxw)M@5InUV@=0fFm&`4@+Ul4&fIc5LZM(q=)WPp$Gd9IDGyr1QY@UqF~g z4Julx((MRCW&IW`iPW1oh?VNwuzw;693Y7$TPptrZ4RkrviB2pDa)-D^QpN2#&r*X zauP@Z{RTpYGs`yV=Uy=b;fDZn*O)i`1RRY+h;brEalk*8TO!{fF#jk3x;MyfBd9|&Bj(d0?Q+CH#@s=*EpMFqN) zCic)(ow>mNmg8%phLCGx@g3xuMR|r89BUphLo91%mkhe?s5NuVVo>c;nDJUTChPA3 zQYtFT#8H_Ltb@yrWP==cT@nd4n}$`v!`tDhNGicb3^p$40VoHkGxx`r(mc;{pRyH0 zHM3~f+^fL(o8#cC^nuGlstHlg!Kev;572A1(!gR-aiFoplDD9RDJK&-Dc8hG9WPw= zy0Q5-8NM;Y(sU+Ich#etTv1|r(vScZ5U}D2HbZ>=@g?H(H^dTa{Ks*U$_7-CA)B~W zHYEzkC5JZ0M>T07O*l~md*O_Wfixsrq&bO6;+4@j6(B6xQBJ&J_6BofoVNsHQFFtmexBJzUS%~i82 z0q^6pr?ve85K)*%Lxdv}FJzz`sMG?1xhc%LMrydky^Ox#b7po6?y`T)jlpi^`z_2n ze?n^Yx_sWn%WfU;EzAj+A*wmKz4#aO;uCWozQxz|c4;VcA%6Ej^;GPJsYK_!NZ!g& zm^71j8>oPo-`)DaE}ykO@)bJR?nsyV@+><5a=U3CzX+ zcB+8vp3AXf+VON`M7@qtSilkjRry1U zR;tK2eZ*E; zbQd^VodXjYoh|#RfUJG;| zVC8U2N}7c}(j}8O+cxMEGYjoYko~-O|R5VIE;*J|T7(+b|g?Uy9=sEEIU@IMv zE{PG?!YYe}PC6r-gD3>PsKg|5wQ{c*m3kYD$(}d~SPP zgrtRdt1E(LbjiXu}1cvFY)x}_VCBg>gW(Ew?(_- zjwPqU4zEmdXJ1~2hM}=xFI}p7Uh&+D>RATSg8vmTR!Fp{h-$on0}*v&yhPx&84Fo0 zRb92D!=3V-s1NN3j{S0Gd9T2Vi6jz1e?MaR7=lPD2Z|#81BLz{T6IuK=4?EB!&Ds( z3cgL;I7b9FjEpe+$Sm;XF!Pc2+N*|FeH2N^4`-q=`Yza**?w4cz7G_^WW@pA2y;_E z_D{pGM2e%;htD%tdWkBpk-c$FERm{jX$#E>CQ5T zX|5zi`~lh5lNe@_5$4*{K{w4KFgqNZ%#}NKQBnv8?_TQYyO6?Z$4`2W#1MVICUg*?2lUq@OOEg>+z^Y{3P&H*k zen9dR>Lttg`x4&m8-ZG;+G}u_sOj6lfk)kkuPCO=LFjRYIqjs6frySUIGYaq@m(>@ 
z{S8U-H(U!=IEG;F-(V`tgl^Nmi=%50+6R7S|V$t02oWk*I<@tPuj$8&3bb(?H? zuV|r6+`JSr@d~MqR0&+qkhI1YYgoiX?aWcc5=mPJmbHl_RjN3Z8`ODG?|NW1&(Yvd z3V>vTb7`t#J4QYI@d9anfJ%C%FWc||^>Ow>(wd(7ZU~$N>kM8Fc~-4>R?plGS5r@^ z>iRlzD58Yrd|o^Q`?N>9fg-%F_QIiv~Z}< z@@wH5BoqvB?VJeDD+a|$Po`&xPO|)7bmmZM$E<2GMjr4q$DxZ)&aN!bE|Z01XlOe0 z9r>k^PC7`vMIAP8Qjq>abYdhO>Q0(71win=9&ZR6$z*$ny55xO>bFNHB_*<^ zGV9v0PSL@YrJs7hl>?`$a?|anS@QGsV6p)_pkpyNa=UY2H8iu!ue(CXCZLIu!38n1 zOn^CvXh%DmVR1>?3kD0%rmf!!ah*2KCHn67&hIp4h)6s%YB z?(DYBBvNKc=W&mqvhqtI({^hLcOh$k7=v4H53B7qb)uf`F`g9f<~Vmn&~TP{0PlTS zBXyh)GRgd~+oRGI&3A_Jfgb+Oh`~TBRg%ms@DYZEI`q<)U*9d3+(b5`2 zYE3R>O{SIzJR$#FXI9U_v57|xeiDZ+l@oUDjK+Z>~ zt-Y#PdPlL*M+LctPebX(#y$??1x-V+V?%EmB9-Bzz)dyN<1KW4@S$r}IBgwtG-WPs zrZSjW%c714bzBt2_Vyi;@H-$gyO~X5lKF>0Ct)sd)J`HttY=l2R=eF)cN;9!Ipu19 zPtuiD7R{t~Amp8CNBIsY%~|Yc99lM0Z+X$ZW)+ZjR%HOKcpyUE^=;x`?%*}JWffI_ zgJc{Ec0DFKT|s%|Cc~KKz67BOH;1FatPntd)IGxJG=S{F&p<jz$jBsMyCFu2K{zA55VHO~8keQZBtUxbqVbn%GM?HBl-X6%pY$A72bd*o#J4+LF znTgs^11=tQ5qYR}qY7S&IvGRkAspb?rD_QuLy@1qYE-5$PaYZ3Q5BF&dTQrP5#618 zK52fjTk=JB?U`on`JmEKXT%w0_Eu3P43EE(TSgrpw8Q()KOjmiVYZ={XecUqRNVr$ zN-EfaaU*wionBQP)JfA`d)TPOTW4BiQDdU2#>+dki(^15X5yuckpK7p&E(~)x_d`V zs!C!6?%}yzq$8;?HWyW2_q0W}jsF!%O>F}-3FPVtO_A)A$`y5ayzy7kl?z)YCrz0= z)KjNl5md)t&>ynLfWNN2cW3plEc(LoRssnqYj$smITCom_shNRDh*?n= z4;6pKqL0JG9#qR^gHu%saC6XxMwzW;RB6AXGOI2AQrH2BG^AJA_}amEGKvV8@m6)& z#BnrK`x}lH3fYWQ5oN+bo)v$AGERQNP&MDsE7q%v;F@G!HX!KU;L9GB-mKl9j&5`A z6oEs9Lb7X$=+RFoB9*ygT-+(5&LSyl6o~N`EYdj?MuWjPrOZcJ1Wo*T1hrf0U2i9f zHn}7ztqZSfxYq;GF+1%{s&OZLBb=yUaCDPWh?C+>Y50){KEjd{`b}3=v5j(WGr6iY zYr3^zrXsI6o7?u@xyksmRYykc4gpv;4C0Z{I0H1H_k!t;HZ7?*2qkcoPY%RJ};McO=)&b+RCvpbb-sDp%hx-X`%;WOy890T1g~?s@x+!vfMcchzT~+FsU>QJ1{=OP*%J#D zaWs}(HNd7*-f^#Kiy|7t8(+>KCC^wOrAmsdYhDFfg|h=&PDhp-#PN`IN|%L4LbTo1 zjjP1rbw9B@xmOWUPQ{+bCI?h$bsg^j3^m*G4nt+9q|j`itUX0a7r?uYSG9l3vTZ*5NdmPUUFbOl@O5M?tB%$hoJpMFiAK;F#^e#k}ASpp-NriKusNFuq z4MJU^7XUek)%bZmTAxI+MAEB9)2|K^M9D!6P{vTR822Q%-c_lh=6Z!(^)`+Mfmjjc 
zBC~8xjR`Ws9uu|IlV-1jJB%CcWH=M>+czt8sVc0FPc&0Y2oS7v8zLha?D&-8l_1^1 zhqxPld?_zq!zrNLnoED3rZCq$hh&vJwU9*9k0bdK`^MaXQ_6`<03PmT!7_-jDdapw zWI2p{#;6JH8F*?e>~P664$N=!pZ;OP&wtvl?{*fn`^wuimK2e6Tck6&`wpx{f{Lw!um- zuRfg?k{fFxgFO}SwfCc$Ig4gEfg|IJpg4N4L(hMFS-(BM><@o@3BQnTH|kLbj~`mF zP2@iSW{>_DLQ6Gx^dOsQA=lF= zUSMILjvNC5``gj1K9I|2oZ~{i`q5OD{s-2!g(1Oss`P0DUBNDUZ2tGZLLNwvFlOXg zF_2@h9|DSB=8-C3@cbt*X%3+-GkcEWD~(>L1WjoCW?(U!;``hL`s(jfS=f7@GcX^2 z;a@I3ho5#3<{CZ(SMcu$HZUvk?jMXl;{s^T9{G@QjUOf;}_^76xuAQ@RvcAZD)Z!ct+6#{J$`C>*K*JWXa%xspue;cI;R2u@6ptb3j3Z=}w+)FpO^n@8?-L z;|{7z?>4&as9JIaGO>#F-G2-JLAtmH+kULjOm<>H`(eVj<4k79|5kO_7wAv%+!s)) z?novX=K4(hxD{HLhdJ;76*j-|WGaok!R^C;^c}0aV$TurLHOPcdwY6xhuGzUeL2vI z=rd~lPi(xT+;_(-3DW=>z&IFl*xlkk(OJjTWh4s+i0 zs{83^tVXZ+Ve~9N-#UrmmZY4t;HfM>^o1i4cI`ZZ$eBi7A)(pFh)mJx?;1>uqxD~V zp{K5<8@mqNOx>mvD2ZkXE4*g4 zaJ8UXkiT07NtX^^x9TdBfh5-BUQgPWuf#J4xft)QwaV;JJNNvxve6HRAyy&XAR+z) ztqZ*~i zGxjl_MBT>@ba4*ku0bpdU%4k~<-Ws~-sAm@(0jK@!4pg?Y#{1guK->zh+K_--shOx`P zU^Od<`{FY{ud-3%6FPkAa;bvYnfI0X5T2Ca`Zjy{)_{F+5emiw~EJtJ{H zd{iXX%B~t{ZZxr96}oxRn-}A+muBGo{$;Jvk>AP4&MCiHpfZ96dEnj?ECfI#*S zBR=-8fK0}m=(8WBV7IfM{t!@koW1# zFWSD~&anzW;-kxDn&}D~YHXw5eTQdgpO`Lwrl#rcZMGG`Uh3>!jRlbD6PZ-wa}SNe zL*Y9q)3s6qv)F~NCxNYxLmigTdB=X*5oS%7!$2v)mtWDeV{3kA^N(cL^?lh-^sR8R;r<_lzCpOSc(371fp z1OF2iB%fLIh$8wi#hJewyN*We%{Y*WRCa@bGk($PW{TF35)Y+rdGTDb7B#c~+xD=d@iIG~$foWq9 za`qhNLUV2QV!)ve?0XtmHn@KBj3FEPR_GhIM%U3jFrIJ#Twxf=#|c@XOLibX?C$)^X25t|Nd9#?E(o|Y`FjvSS@9`9zou% zmvSdf#bdseWEBGzHSl;O&PVk?Wy-xy4hS>3d}|zb;POu(Iy?>f!zk!tJo?i`349m= z9YzRWl*5{w+yvLA1+WD@`WDC#p#KD%OXkou4!mFH_cd~H7>p|?vQV1m) zL7YXc$%94Rm(_djz61~=4<6W_M_=7_2r>W(ed#MIIDV9FmU#hNRSMV!Iq@VC1^s$> zl1cA($Eor-ppGs2l;I#VIh@{AiuG3MfjUT-_7D#$wSypHI(UF?J=y#uS zP$l!jpMMKrT+W^Vn*HC_h-Qg6jRgAtz=%m7xX1gb6Dosw_x4IQpy)*l#6KhdK7SG3J*)TKCJ^WMrL! 
zp>^ta4@Hw?`9==`(W!+fs--TUH+vnmNVJe0U#5L)9(FIG^`6*e1BDflY+gjYiwk`S z?^Vr4_3(BZ6uMQ^>ar#*xP=3iD77&!C%SXjP7a z@nT^ilA>(Fy!Ac!$TUf@?M&E*4NgDH{(Xj1Ol0fV?RH2Q0vX^LT_%{aN-$ZKz>m)G zs`qp$!xi8wW%+~~a0%u%$UkU%_a-U@zP~&vQ}|_}Bek#>K2R)_4=%vuq3Y&mIXE_S zhlcdbV98SWQf73ju#6&k&oJazk?RaAuS=Gb3l` zDtt-yO`gfVS^}!KYH-m^c*bniNH?c|HcY;4+8u9Dl_-(YD7%=rSct*lsKQ8&$t&Vh z@)!w%NZ7T4)IoHW!D>cvomw?z>g7sGI?(G_Enr49**v*n_MV{|80ewhD+Vkfw zhWWroH?TjxpHY9K^YfslS8cYDVAQX1a}iVFQzMGH+d+f~_gXy6g}f#$i9<-N$D)KP3y%=FSH?Gy$@OF=b@*PO*g z(u^NZc?segLI_}sXrtICd166@s%vw3Ur2m@RSj6=5lS#P)kV~?YZ(p%$bQ9agN?x4 ziKdWh7^DM-ERsCJh!RKq|K4w=1pub`hO`^Ypw*GUel99RyskY%hVU@5iny=X{+`1w zeI@+3jMP<;rz(0s$NAo0Kp~S<9S>K7!ObV?W;`tx@fbJuhMJky}GNAFN zd+;ib-BpDc=D;P$*m@YoEs^y2bPvSC!952qQBgrcry5{+tfPAhRw+CBh~bwdCAL(Z zl>4~Bs!sMJDjT?P82l-tY@%@-$K6_|1fv<-K}M@~OntyB22kj@bNE%Iw8UfnvV8cD z{lfid%vi|3LUPmc;mPn6NLrCwuzLtapovLg%kU(6`8dRe{Lvzj>q+If;uQivnB~Yg zP}m4Hl2t0>^aSsuDjXoA6LIw9KOeq)d-(I^7o=6gjCb_tfj>+gAdWUpTI31bPCitP|toiC4?3) z#KSS$sZ?Uo=RQkiN;ec=@ZBUdq$iI9Q1cQSc0duG)10|Yi`sZ*)4~kRa9QMaj$_~! zo@My~Hr3=|W}5f1X9$&3TL?{X19NR$HjasQ4xDNi>PkvzdDIf#{b$^PYohZOs%NNk zWzS!cfId`aiGvp(61Ocin-Y4Il$^;;StXltFt&~5Lubpb@FZH|Z6Sh-f?9~}hiiWI zHsC;3=13ffT|Z8iCaP#ZX;wXwn++W;asToFesX0N%cbuB)+gMjseEG0HtZN7%bTpW zK*uuGy!@-#sLawlfow18<$yRKYNw(hSfTC=#LBIaDqPScYV1t?i8tNDpBM6tgerL@ zui;yF-(U76cR}~v8z<_DfW1{AkJm1(AyQRgU(7qPXJ67BeTZYTOt-KwMr~Wr^lckt z7i1P`I%(?KBUc&t%rY(2jx!_N@E|Zxf$w2!-iwrG;_y}=F|lbC=JOtSoW9Qe+GAZZ$uiV4{le5YqS%%i|L6S zGB9yllRF+hP8`w(u4T)Sob>{NBX!oGpUw65P(H&jF?KlMCBYfw)VFXW2dGx!u*@gK z1e}c=Xe7-%1M<>Xw#yTAEaR?+F2=?{=mWAovtc&GZ&}E8A&GY4APp_p2MCX=5hW$! 
zpo*g?3)y-Vay?A{Yu68yK%ts$YHs zZTc4-kJ{jrw4qL7xH=2$>U`Gc4v_sYCw`6xtYbMgjEU$l$6;AT6--}nuy9yN(qJKt zf~CQRuup6(9C(NXUexLsn5A))6oWKcik1c)XEM9VksOdE7Y0%BeEe|9uLg4ILY7)k zL%VRZlm2{{JWOS0X;QSuZ-s*i>jp5VicHn;64A({Oo=^GHAe_^C92eUsyR-N>I~Fo zoUp2DbXB9|O%4Wogbdk4P9QY%he1qmEDJZUlXUVA(BikUC0Rdh^E1NIA$(Ug@Ez0$ z7j{Nv5(Q5?th>H9tI2LXe1kV`WHn?W2^kJvp;q3&7(RrYXYQpB{4}`> z;Vb4MlDJnDRepVTy@RIqsjp>7Hd6%@=+ceTHJ5>`IV>rKrKG=w&5^5%dk11AX>#4M zG^uWiCaNEf8Z#mUF!+qqNB$$8S{@+mPoy1z%8eiZmTZ~XsZ!D09V>7lBJT|74MYul zuE%fxl{lQh*b$K=%tBz0BPWMbe+`laf0bdd8d z5PN>M@|34UBkgc~Zw431oh#b7gIo-bSh7J4-oo1Z6cG$n9!^y#)UK+gLw35agJ>mE z+YcpB6W4Vll{V>XzhJMVHoJ<@vnQwsYfaKSeRJWCD^Zz|MGtOJS66Faxe~gHD(H4{ zSKY}iIk&NaK6#MHlpMg9io9uX9A0KE%w!bWvr0jU@){j0Dv%Mus(zlq#lXGtq`KYV z#D*+*kSE0wSgrwLiBp2gpl$P^|Fs$G?qa$M0(9e-t1^{3vlBSU{Oyf15rw3Q19++$ znlTC|VNJV47ZA>$=Czdyi0t@;DyBG;_;A_~WPZV+sjV>XDmZc=FNUO{OwfF2t@$0@Bv8JJ>tEgBfu%&C16O06YnTBfq!dBatb4p!N*--+L;(`I@a zlc{{t4JMLi2;P*#BH=K&u(^R)mgydkT0o}eJ!{14TY{aTgWn+B^fO8OQn){DV3J3z zy&YW(m2=QMndbC(v6D!9rXnjfFj6KSJj7=rslhXj3E^-8l~uUXm|{oLW(xa#(*xx0 zkB@Pf$vCPsMdG%Gx0O4saAXQrKB>%TP9{>eLJAXi*lq)@c_nYRlc+6=Cr!@V zj-5NWaqJ-HiK(p)g}lTmwhc-|W6Zy>Zotnv3#IE&2h}xEGl@tC8|H*FFaO7vh|5Dq zwP-7%g_ius=*sScdG@=ELaR@Zj)Nv@AZY<9$Voy)PE-xUk0zr>P<3oEz&*8OmXOr< zCYp?7to8Gg$!WeOr5%W#m&MP49ej`IdQtMY+YXhhHPqvI*bj0*ZJ2UcYY zS{F)D*~Wrh2NVh;?`NWt2ERxJWhBMpE~5Tc@$`otKY^cIGlA$Pc(Bf`nKc)S4%dkD(?_!Io@EkSX|GA+XaB6|LYofOJ4 z9Ic?I?=!^bN}WDY2%&!fDWHdXH1y!V0<2ZyuRr-IKY{v+St@^~C)7HmF}nf&?g)t) zf%%Nvj99CARCVGd_yFnd)I0g{CD_2n&Bli!{HP#OY^s@g^yRX^=_R=kUvzLNX69Hq zKmTUXpdc-oI#&MoCGhgu89upC>+=Z^K+UL|2?2_! 
z61T?w98jR>yo?3G==m!=b8?8(YeAFCSec*Q#4rHWB2Hll;D*S(Y7@O`m}C%ZVxEkp zo|!pRe*eDCP`hU(GFNqWv&sFhix{5?pE)6w+|%b%~mBln`<$hsbtu z#nEnQ{+L3`v>o}^@#w+v1r0&kScLcl-y@t{M9W;C7k_;skDmJoC`jInf>0kP3jRQU!h+C2Fes zqZb4w19Mp@6#EGfK#L`AAR#J*pqnMPwg-X`ZlndM$IRv1Feg-)aG(>EH-^h)^9gz5 zV1+}gX>RkJG*8}8|4I#L9s=0y9KJlif$v{hkNMLka4GLA*i2%qU09d_SRVk6O}-v5 zh376F1fyhT?=+qXI0=m^jX~s|k>}ty6wekYFzgZA6J{E^jza9ZRfHyEvRrEkQ`HhX zMz6^F9?P*Z4I6+gV*u9DnjILPcT4T3b*T_ zeK57_hfqa%d2L0S3NKTx&k!n}glxdPsGgt;aXZ$#eEF6>SPE*fz6pK); zf#)sT+Tmvtl~RIX&^d+5-`m}&686?UW1ntj)VyMZi)6{+K|hBvfYClR)5_4Tk0iFeA431Z6cD~*!!-)Mhv|h5=gXF#spQ20O|;LSi+jVY>}wIGotO+(GOQ4N1v!`&lLP zX>LnhgOxxomTtvN?RSJhWsdKQ13yXa@D-#07)7Rrua8vD&~TDjBoyo45#udau5Q31 zq9t1L!Bw6zGUW<1Dktrl-&}>N%2wz=K{Ootqtp`hn@w36OmpVG4U9}RjsoW+^-hjU zx=QZo4cc%r_4w}r)hw0eylKjz21_Ed?OZ9JuPUU54^gc)g5*uT(@Ft2$u*7P63)J?KqWB%=b7zumU1enJ zX89nIgl$mkSVq&IDLD(4^r|pnIVKsJx^jgU72FNEV`>9eksQ%FsE#KUl#<tdFB=w7Bnz{T-If1h zlXEu?B4u-z&W_U;j64d#?_NGq)e=|UBkz>VZK|Ns#{#xagWXsooT##r#@7Y4W(>eR zf#kiE2WS|>*d=CU7@Le`8o{n7k)jL~?O3?OrA^t80Wj^@-#vY~5yXn37byeaZ9x9< zyEe8muwrtvlD%qgWRlt3SILtOlE~JBTR;bzWf0AB(wQ7vI24(%>N|izNfP*c{_@)& z-(Ep~Fjvke=Ooh5?gja_%v>S-=l1Z&&ldZ{ob+1&dPIIKmQ-W010ph&ZO|yHvyfgH zRa1Z+119iN5(hB^yMy><%lFda;-i>^FkzzAWyih`u85H==!2@DAJmw6{%~7yU?iln zCNcx54Jh;_Mq^BMtYB9Gg;2FG(Vq2 zl>&&;`V)s-z>Rte@q*WOnwgA#}F0Px2YB*8UyRFIUN7+?P^5?3mcj_DlIe7fbAiLWSV#@SIhDI+M7IofYF@|-{fx2X2D~OGEO{_a zBL9+%OB1pwG>wF4hjP9`YATgxk^@*DEcDtnL!@L+{R-IvPX5a&BY;2sP7!QV|J%#ctD%hJkNtrz%3N^0NrW+krs<=av8? zEGSC;3g)+Lg4`MM>8LFE!CLeq6&&}zp^KkP34?|vrE(~zL{9xS=oXE+p9}#jfpuV! 
zz$*Jd2Hx->r}i@gjr>CLyiCILI!-Ud^Ro0|q{^jzm?-We>x8VpQnk*ko?Qi0Jc4Z+ zW+NMZWxtOM)(eN+h_XBneRkF8JC2a>0z<+{Q~NB-Z#bOBs>o?<;2BI5(!YPllCErp z8`P*~>(_g~*UhzyzmgwQb2}e#+V@~`?bu^}CgnRO!WOP%m`+*YVHnb8o@g90mvDJG zf2&}~U{BK|d(O~PJtpo)C()0_8B2a`(s5pz=Dhr#9>0O*ldQ~k_?>caj|LM=Cka7j zg-*eg66PF%mLxinIDZf50<4*p9VaBVo9F4D=?nLw>Z>Lm86%Og3=SQN6{ybtXQMWiC|>b!o$*VQ#vCx?tn5 zs=qL#WS3L&Un5E`WT=Wb#HfQJkxf;)COFmIl7eKn<$qtyQ7nzA#42q#G} z-tgFsbyJYgJliN=G{`^t_5>Zr$N?U#Tqu8>6-@BB+3Z_3RMlluU#*M5WppGgvDh*a zprmCc-QK#p|FT6ZjiZ`OC=5X`s@{E!sNQ8A|55uzK^hyPd~jn^RH|goFa*5nOwz{z zbC1hVIbx=M{$R^2TVFxdZ&yzh-90NN1hPp%C~D%;8FF4VND*e_KoA?xr;p>9i6ciR zk?jTM1CWM~IGj7tPjkI=kVkjE@OM`RD9rOV0MU}glRy?0X>hK4d z1LyY$9?ezMjYrY_y5u}xrl5s_Akv^3%o!3#LKfx2m-9)ase|JvHovlJ3fu37ED4N@ z$U?Mz&X)hA0J2w%H*-7XkkNoxM%KvPG`#T*wcey%MO67!wtc1I@_ zx6aSc8Z)={Rg>(aB7m>3j<-egsv{%-l7$FZhg=X$?wd7`UE$?C36sP*rQzWchlvda zLU2Uo6+fG*bebDvRk=Z);VHv zQX;K1uOg%M4aOa@1!2+@ggad3HI<}~+R#jm zjWs*}1@m8_T;fDOd0J>rkrNN0Eo5k@LC(%~Y5;8spI!cHar@Fe9qs9>GDznh?AtvLsO? zORMBeRLVUWqNZ@iJ4{gJ>Sjd$qp+F{hO=@WJE({wE9QnAREaKl9}KI=oMbefqQl4( z7Bx>9COebprpWt3^V~OPIEo`+so{r}VG&swj+}tWHnSa45|R6%txTc}U+gbRTNWeT z6Pu}RqIa?mv~r2{>qCQMqt9;5(S3EM0iihANPfTR2>wY!g!-ovG*KnEN4W2Uv3Q{< zdTkp-Wl_iB3ia}dd)-y_x*Ig6f+*hRJt`Iro+MLLmvoY8v=E5Tm70$rrDf*e8qB|s zIr+FX?9j1@>3R-7C**W}_&ITn=1c6Re+JTiSx3jk6RSj$cHkAqOz@83Y0@N%Vt-q% zgiC0Im%@_X{Z|;@hSF>sGH8kBP~N2lM$&^OArqQnEXJw{G5DuW5>-h!kdVAu5%g3e zYtExUp3cz8l+!IfLH~_lY%AI~Fpey)(3i{eqwAys6`=1FWi8lf4kAU{j=m%pS(31& z0vY4}g3Kn!&>zg8pzTQRGl*+VqiBK)XNjEtBAzsS`lDViB4Xo;@_Uo9LAW5_I7Av` zEwc0Qt&*rB#HINuNpjGxFGloQ+VEqMIP`D%9wtytm{O50B{s8(5jg%b^+6c8_eJ6(yKP2g{-a9g9d%V}A}P{3$FklRS<=5($SBWR33dU|%88 zka1+Usv^7XI3<80Ev%47%!QO6*HjhVQ&;D#%XRXlL4^K{wD0JHlHxUZ0!PyRqic-f zX64$z&1f<))v3P9n4KC%@nG=H{OBfW7A4fZ0gk{YHdr8?VzErqNONA%1@txpE1K87 z4xBTm#Hzr0+;+06PO_?chccPMGTaeX!cp9Jn5jX1%9q&xl1BMLM(7F|=zzMSk|iAg zI-<{WOda4kKu+tBJD(q*wnF>K#045k2uvttkQ}~3W5G;WlU8I+%4UBjT?Qy(0dFag z*0J*;0?2fZ?fFx^Q_l~W42fBhKfWxZQmb11XR>?w&$tt(ibQ#XmW~x~BHJ}exP4rD 
ziYu>yygdfVw2+0rrT}eDA*OK(d4eW%HC3lvr)^*W(?-1S`Avl)@_~ZGSi6ZT_Kb&7 z>X}L$Lnv}4i^wjB!BvpQc++HOBj%C&r85K4AP-Fy6+P2L{LB^L#fcupph7aUIQkA< zi6||4UolRZc5elQb5UIE2G5x)Q}&p-RN>5JN0J@R0t%?nsF(5rZZ>L-T&qNY5R+x% zjx6a}DFgO-&6P3Ewk=a$g9yQf)$E2}*rW?*%uQR9n za_bJI%(*Y`?oY|?bR1(^k7Q4qDO<0c=Bw%McXgOFnhbok+ui2^^JK3WFzeJqbSYsy zjtET=>0W8o3B9pi-QWM1mDunvcA||0tsk%K(V3et=FY@;%_|}=l;*d*YJt4CP#l8iHAYU)}{;{HV}*Y0FL(60=Vsxoj_H=?0sXM=sPOu?x@3!q+ficLIL+axoW*5(URs+ zbP+W47CfERXkE#xhl7v(JXgH(vEn@tHmFcoU`@?p(kD#?{dY+^-0wHC^*jI8V;2Am zm%n48uey`syLuS}_o^XDE#wq)Z^D@SpZ{zJd1MdQ6`{H)j)VJ#We~4DGjl&s2sv!V zLn#%JRI1)Ckmz6lrogAKL|TJH+L?uk>Z<}MTA}Qv6^oggyqIM2a=GJzsctzZIj!kL zM!inR90IXBC@8|^n6tKB5HOFz*nNXsMg9wrCQ-2SWE8o9IBF(GrjmiFqv^c;+?^;PccPgbu}6^J;x}aXh*9kqKw!}v=+#o{Ob)FS zGb_i)NfU-#b-rkS+*UGqNLJAy=@V7m)eTf$FKog9n+^r6`(VqNEitQ(dkB2yr~$Ra zkG!kvQ>pq8>PF7--25O-81>m}WxH#J>SWK)zl9Spm03Q>?hAQkz#55LH!F-%r`~?6 zHNeL5D>lvpJBtuW{h7O(-R|ZnVE2lZ?9kqmgI`n+k?lGd5#{CgzAcxrs9F-iY8xI( zQC<0l86s45=9OdK!~f83;s8J|n=9*uIaC!j;u}~gv?v;m7>rZWf^W%dX}hd*5*HoZ z-PD}*Fe^ia!CW{UpmWZXb>ibRlw}vM*xtb+6X+kmx<@0Ve)XV;Ckc5d$SiR)y8e&m zhjbdj*?jg$hUhkV$U!8ta*D2HYb4P$)*{|?iKRWB8DN=lQy~BXMDj3INqoyg|2ps! 
zS6{C*L<=jE6BgqY^<~0r*PlOIUNS`B ztn$t$5hZ*_?qoT#EvKg8+_J`*Y#Suc%uDf_?%<&vr!0p(MPUn4cvVmb?h&V;8y)ql zQnoq#D(YxG63TjJq~~@2DJPNPx@=e}Kzos%5t)@-A1~>t|5B21g`0z~boY`p8H-&J zxYHYENo@E8>9{}F+C$uy*AJF^=m(MFltV`VY|D;ToHAkU=N|T`EiddxHTY3yR{Ero zRKDrtp-O&cZ3)V&M!t@L7Vfn4O5W;{Q#E!z%`vlkKxUR1c?JQLEN*s85H$nDw?*E0 zFTC^1o3CwE`Px2#5Qspn#KzJocOdCBnq4%JUa^%MNm6g@$TE92&hAs<_1UW4ZkT0F zT^fCTy@TVW8jN z13CmNIpa(!_3F$142t_uSXgX>@(!xPY(E{{m|>wKux8k6vayj{I)zD+vjgqvSWOKg zeN{5i2#h>4=iaj-U6iqkM}S9=xdP~}pG%nLe_X)Xf2nVwy<{$zX;2W=Wg=aN9{RRJ zksP^+1V<~l;h1aau;4%#7U{vU!BjvlZlOHLlo-Dw1dFn=q~@1h z%#q87Yi96C7agWavxdZfZmI}dcrKJ&Bv#l4_F|L?7<3OLZKm;2&AJDIcoX`SXkN&Q zJ-w;3ddD{mhYqxstSVcwM-_5>on7<~z(B&JP(wr=CeSNjDS-^Jy^mY-!C#Itg8Mdt z*lF*dlf>0=!%0LWw26LnC0%S)zZo-U$n}13#x=_A!TRPh4YN-D_;`|7TAfigI};4p zj9E5#4mUoC+GyNwq~Iya@~IN^4rQGsOcnipmSyYg z{S4;9u56Wg6gE>$qzkl3%7uLpAkW)Z$_g~t$a6IVOywnwLI%!JF)m~z^@`Zdi4)mY zjmEa2YW?NS$fjQd8s9pkLn2EVF28xhtrVH;fS2Lrmy@cD4F|DM%jNfg7GfnCy(%mn zQ{fX?_jG#@R-Y~*R$Sv&l7v%w#`;mx9HolX`cOqf?e(TI0wN>s!6td~jydU+0^_ng zx}@Kr5)`H2JDbQYOB(4GbjLlh}0 zGV}?e`MBr|Ucx+2|C~6ScLKE8a6mAQ5qVX4*PaoO#*I)nY;MBm8Fx9Tq_@d4H1%Q; zBh3q0XPD?0*$HiwF%mAdqbdX(_cMv2?4f8>c1R(aQJj$pD}}mb5v^{`DjU!zxaMlA zPB6oYN-%2Lz5*ZMSc0*dtul~383(0_PQgczg~GA8TcKr8nncNvB`V~9%&0Y1)GO$T zxbh9W5AvWw4S*l@t*Mdv=9)8!yLOrLbKx3X6JaCvbz=cYOIV1V&;xPuX|SMXs)n}C}Q`j z@x#H|Y(%Zd(&JL>0`CC7Vs{@n1+jA%JaUYY^X|ObWQbfcHtZtMNJWvf!0gZJC+14| zjzycuSAt9u&U9+WRBJMeQDQ2& zW8+s&M7iQD56;96sp%;T#dQ!l$N`vm9LupgR%kgD3u(r_qoF7_v3R@_M72R*8!`hA z+jMX`YA9Kt1Hh9g{~LSG8OexN*^BJJnc2bV6nRrn%DeBVXjj*k!WH98M9g* zLiZ;d9gv)4VIv~h)|I^>)t@x?AIaW4LT`tycq1F8g%Y2*8vmn$EFREZq*NW$&aFrieo%hj~F%_l?R zl5xD~D0$NdQi-arOm6xhVld(1JVnYqXayDW3M$)xUN7Y&x|DZ#CbCvUXy)I9V{F4{ zNz^fJgF!JTz-norsQEWGaYq$1yw2D>oMbU2k;UdZIJ#&R4pJ(xu}?5Ngae2%uZ7;A zm!oBolv^82yC$7-Bt|*gXa)KO)bGe6kYkY^(N!u7uIH0TN!!T(-OjDB_%pCs`5S-c zBZhYrOJ7LVF?R55K4Ly<*SAKd3j3INlH|c*+YSf8&WbFt$uWHrPU+!0p#U`ii3G~k zbC9kQNZ?q98za3=r7S6u$dclybCagbZCNvtu;f;?0A+hlr0;7HB|6}dh$BwC!u$`W 
z9({p9igf)2doHd@ZzTpYOV6ZtnaGIv5r~Q#LOio-rm8U+JG456V%0dje;o{69Q8B_ z9gu~~Wj9eU`!-+}QsBrjYw0e^NSC@*<(Ru;lQiBiqlySRe?yESgt6UBvQZHL|2b#` z={)I$^0m+SO&qscqRay%s6y&O{=#!r%QdIBbj-pGbDofRJcRnszj6G>i3zGqOn@Pz z7?*c{#>}ycq1~gK*VY01Xi%ROdtgxP_Z`~2Q2K~7^6DmK9itkUWWgOQQdt(lqtZFL zs=|U`|69p|09nUHjy9Gww6PZhyS*fGI2}d9=@lA(QO1;6WZSXfQkRY)(^$tm8W2Ly z#u}`_c`h3@ze9ruYW@BAQeM7>OPK-1h? z`0M`+N{ULKn0<90eC}7#78CJe4l-@e`Hpxl68$eXx>|Jq5m-DfIZ{jJC7%A=9{%{* zUl5p22`xrHQFg&k*)JuCwTB7D%sbvPy48qx3@u4Av#?=mYAwu;K~PGf-tq$sJ}ZIv z)yVJUGi4@TcBud1_V)9`^H0ybNW6@pxqQs&Q>>x>r-Yu+Gx1;;rk0pVXvt_g4wp=*5^bN84TrcP_gSq+nhm>;|%gCyfx8N_*EI*uoU45pdgfl0EQ zf$c&est?FrUNTi&!>B&;*#q}H@{ESaGjrbU_-8YZ9%TQ{R4WbjzjGpcY4k<{TSoQ> z?!?-QFeR2M*I`jpxT^(07H&=Bh7xF~(z>;Feb|g@?$!ts&!~ec_oak|g3T15*#;s| zOQHJEhm&Mo4+0>Ul*xt;3uP;Xn)R*VQexk>=oo=_x)saZd2Jf}8l!Q>U0^VrlDrHw zWeAoDadWi<25X7NRwT<~5Y$N>d?Oo-OjeVUQnEa-`4O-!*L3C3l$yfF&N#-EEUZmZ zSnr78!h#>sMDPdJ(KACkAZ%Y~CIo1qCvIx~*dIHDB^!4zoiqa~U<*xN;?RYva;=4? zvw$-Eg+4k&O*xWO(k1V!rsi#pK$nq}$`o?p#tqm~HFD)g6;>Y_8%2b z)L=aWRnn{wnxFuL0Ylmim|bBuE?4hiFNr`_IneNh>lcyUjVnb!QbM8%5uAudjd^A; zFIl*;C`|uJ+}SNTxxws+`@wij$8l*B^8F^)-HP34$kP>NS}}V(L3kyQH*k?$z&j2> zqcF0p;gIcM%EkQNEvulnHEa5!R;DV3&Kp!vbF(?5prE=>ypZXEV$yUlaCk-?&>MQw zR6~~_KO>OGzDbV#fxlTH=|M?E;lu_tiNzsJ14M3^c|^^0ws?|=D2}>7Q5GDWdvLET z9|97|iH2&R0jQVUj?dU5iMtymmpKTw=61~yE-IV-si=S}mjW0 z3)|AZLm17&IfG^L+@xqvXG3o^LZp;^#Y@j*f8;c`3h%5 zr>uo50ivlIASlh@W+4i23Nc-1`j+A#e~OUP%t>c9JjGerea z9Yi@zTX`JURXL?Si|7&?Rw)&d^Qh{Hk>?IO4+jHZ<~I*xh}bfJ2Y}LIGYK1um@g|r z{>e&BG^7Tq8IJUVfDNwj5E&J089$mH*ms(PxsZ$~5%mn|z__Mb(o;9iVBdDFkQI$& zL@@}wSyJJaoTQ`~#SQpD49CBO{v8&92_oG^5eXf4JcwhK;@f@R+3-AQH>jRSQ=D!P z2rJss6nrlcrV16VvFlzm6h8+YC!E9)vY~jtI}JStBJ<4Q@rcCpOQv>VL&ti}fU!QA zUA{hfK?Zn1PX7kmFu38KFvT>GxCI&@lE?+Z;OD|&xe~{KTe+0`Z zb65oIkfk?Th%CQ1i0JVfp1^Z(WyaZAq)9B+P?H4F`l{huj#xro^VLCK22GVrB0(L3 z{5hoqs_r>mJi-A<;wa2jr3`th$dIR%6|XYjOM`nMEkCuK*LV`W=Bsye&x)5eZhizN zeqoL!y!(&4AgbnmINFom>tXdluvTH=9}}T6jl>s3?AJGhqkk(nFq8fQi5R}HSiA?t 
z2gDr}_WZ3nzpi!A6>(5L=%G1YB|6|acq~aAWfQuev*-ESjS%Ke-#%aoWMJ7hgQU#wcwUj6NnYfdpNQ^ z(sUK$e5G~_*<=zsTr@{h8}oKUq(UGemF`UwNJC$hPEjT_6@e$~0LwF8d7w-7xaPE= z`+S=1UncWRvduJ(k!R30^ZuMbcEmCF5v`OOYz~k@?)FuQlO>q)nne`%w~g6f&^%iZ z`NGazozuN-xC2&X_I6gmv01`weCc~~l0*n7eqa@DFhPokHurRr%%V3agyhD>_|Y8Z zGO&PRhAwBox8QPMkI`b$K*uYJN8^UAKvPcne!g7-I{5V<9fNjv`uHFD_C;P+Sp)?f znvVX76^xB6iL$T(Qy~It&}eWECUTc&Ws#eY6rDLpsv>56|2m_Nlzl+TDeQ1g6P3GR z(;H4G@+2|dezB7z`E-YluB6~o6vCM`^OOw*-Ln)Y4ZjH)FVZCDUU9>@3_hGhfwfye zloB%Yq>5JZBX;BEYO&=WrPwhH*N_o5n0U`oYr)H52RbCIAZH z#sFS%2mxtSrV8uW6V(0$4sPyuj#T*$_GLD%Z4`BlJNDf9Y;$L~#>jp447A&cGx5`S zh5fu)>QAWW&sKtVqiir%>L-H*{pr=wdDHORHs;xxpEY)WbJF}~7^@U#VsT(1KE0qp zGtDmP{`+9o^Z;jO7RW#*{fi%*36 zPSm5Z`)i>WconVh*YK@xiaT+y`zy5m&(x*_kB;00WU33Oy^wC5%)E|K=2{>woL9rc ztQs@w15H+$X8g;|tNEr4Q6dkd9 z0cz)Inae^y9KgNY<6i@!GhW(;iMx=ocQ3hI&=;MVE)5Ipm@W;*EXJJ4-J{t&C_H(t zEcn=28&n?x;q*`{S7C}e9GpFtB3ZpkYS%440;xFho}c;=eU<|S;+-@o37~G5LLH&! zM~_L%&yOQeD9mM=x!?T^L~1iPYw4DI?4aeOsIeQs096S~G`2eh_8Hs`fA(-`?0?7J z`Om#?903@^2RNPUf^j}b{5-eael7$?j-kw>AKR+w8|lQrTsiBE;kpp*7l>f}3fWW@ z@};rIUT%r{xF}3AEbqN23O5^wqT#?X(m?tj?nFKc48&}ke!vI(k`0Kk_7HW2n0z;SwAEJuB zPZ!BsG~Kxe*@S&{Vl;Y>Y(lQ*e~b;sf9)T1vQSwiO% zIrC6mGH?jXGNLua$}{MyNjm6vz?kRCzL(g~*(ubei0pL=S*TjN2L`{+Z*a_)wu8iI zb2^D$7qH6UShH1APyHUyS;znygBW6FtlWH5x=fw;L7N`9>Z%Ios*fkhIN~1;qlUo< z!#8T4$+@kA{JmR)SKWZ`yrm};c6oz`MCTfX`iHVml?68Ruenm}vBsRQBI-P#jB=EU zf&Up-S#k?5o3- zyzLfEs$2BM01d1pQa+e8^1&Skl^5zDNAsz9Hgl*el1dwq9nubd+Rr5f{l*mH<%0Xi z6k}##qN0I`4x&#%M4$H(53(8nr`V+&AkW|`Jc2L%OcnJrNJl>hj`@ckUCBE`bY+~M zWdPlbJdQ|(7nY|`q~!DSZwBGNxrEE-O(-mInj{nn*PB(H^Qe_mmO&TY)2MHltwdqy*}NY2beT!G_;koC}i zrl)V+_Z?EA+6oxqJ+osABod!PNiICt9p3!ZnrXB{73uJ}o|EQ!-eJT6NfyeFcXIw2 z1l~CQOFI@tTNLmq2Bowg zcu~dWR}5!={y87;?e}r`J8=u&BNU}eTZWZLauP$$(2tNhG{0v6b>whq*?fHbGA2rs zr@o2&>LRMWnfvy~7fVei7W&^ck*b^rtQ8@vWQ}?bCJ$H6{RD)kP&1}u)Twck{T{&> zf}Q7`|AGnta(G6HlBkf1Z_O`PXPE#qB%03m<{!`3oGYHs-f3eC9eud2tXR!sInkH zSQ6{w0j5w)N%;iSCxOD{zXJuiu>&a0^r=nC5e%_}dH%NnCV8lmKLMuFCDK|ih`jZV 
z_g>Ize}BDEy&3QZ;rgei_-9{^N1a#AE)jrmg&xog=kra`A?hfR5;<@~V#SEKeZ{Uu zEHJ=P!Tv&aDX+?56iav4E3Tl)8Sz1Zk`PgzGB!iEI+IemXhPTJBr=XYxHBdbw_HJe zNqc^1(2(hak)Co%1Fml_3ZKcs^840VtsH4OQ) z(y=)Gw@u-S2P~8{?4(OK(O-gGKQW(AvP*44NkLp%RXa;W#=`?cTuSe-XKd3zE^JYD z(uy$@ZmI}Pd^uthO7);?zXf>C(%A^;MMv(Ktr~->hrZhY z#kTzMWf_pELbdu2BzF1Fxck#Y*XO_=gd7$8g%0zU*~^@|PZ=&qlJY8Xw=4silA9-1 zgT*C>`8BPwXj-x zH@3tIz@KGl&RCDnT&XYnbAT1z5;JEe+Gr^K{mQ`v8rtcG5=gna@q2pw2DVeOGF!>q z2np*k(=@y3S8<{x$1Gg2RX6)^#7JS&Yc1(~mQiy+jbm5HElezH5s5GZi7CZ9H1Und zC}E=>{)8e6>a7fNFpca}q3%060ZZQjnTb0F9)Obw2kq^Mtxpn}8YjKVCRaMSbUDj% zr8XSC$o+1>(u=gga+UcjpHvYjkNuTBFS(G%6=Ez?=T)2gL^$_xlArdO^fi(%M`YnV z@U|n*tJVM@6SSEn2qcYs|5)j$?OHfO;8$2ElKeMB$smERlgbnR3E|Tr)Q@^7F2*T} z5~60HaHBfMYhEfeah;sf5FSCn#R4(E1;iTQW7(0^-7S{AH3Oy*rybW+mjQpdq}~y? zD$AQoS_fHpmshfTx1>AZM2UAZrYcf`=51gBL-KnM7`UoD0FGN+GE0FI;J^hFL*Y>l zG~ra_NB=!OqK98TBqwSD1Vw6NHtPLf&nnK@7Ec=La|A0eR5p4RF?#~(ZKe^_x2;|! z1$%=6?_%$C*dzgyBs!8j;7D>GE3ej;^>qZ5jsp};-XB8uS5t`HHZbGNft)BACTV$P zfnt=6V?;k-Z#H)3hV1pp-U*2YNnPJblkj+!-J*T{nggA$**R$v7`B9t6ymvtx?PIe z_+~VcMouFRbo_&Uppqb0i30h}!vo0`jPsB=yzhz*VHfqct!4!90%=@^RJW|e9u71( zSNlsrx=7ykQOzajc{y{(R#F^WD~=aV#&-L`?m_xm@q*-~J^^ceeBiWo0CpK^v?EQb zsyZXA9g^9pH$LNW7)Q601$So@!cKbwX?oHPQRAr!V2dY>;C#n$B~|gSI2M_7PT3-26}`YDmFnz)0h=kyo?^GtIjc4Q`GT5CCUf07g}9 z%A?j&c`IYHB-XfDm41mJMMSt> zp}L!_KM_XDUULuqtBA6b$NK#GR*!`4U$CQ$h;MB^BNXJ&2spn@AA} zL~`Y5TOucof=)vA8ae~OojwY2yfLk3Lo}X3uELYbnm2qrGF$jhmh{)`!AK z?2PDAfej3ST{vkj@q)%aRKL99gn~q|Yr>?U*BmtCp6`4nb2ntu<2Ob}(lI)oBJ9ut ztKzSk>fxBfijI2VZg*U%1}D@x=hC?=brbn98JP(F$3*>fn`c+kF1=+aKRv(b}0AQR7jTM~TDl29j1tHztu8(~RwKn!^?; zkoY~of1uKMjmkY`LkVA;8P32(f+`g^1m?4nan)UM=?nkd9{%{*n+~i!v(ETAse~vq zj5dtyfS{ttCTg>{sWs}|sIm3=`8QzrTo-mwmFhjzv)9Cc%c=6E{d?+v3sHk9qyJ0Y z7)04cZ4#8wAc#Z&FjmeTkw8<+hj_yQ0!0~m19c##Gn4N{$6P(KoJ{9S^Pr>Ljra)U zkni9NR?Tlej$^=A4Qb(Arq9pk!q5ORPoc<1^IV}Bx;a;N0zlz!M;M}J_#3r z>zjm&+Co%O@c^#ln3l5+tS>@j_ma%ov!w^1@PNWbuO2a-Mblqj&d7k>vFY|#MNyie zuXiJI&McJ(DPYOaJM=*-J&dlZOlNpoQ|5hFH%8#aNa9nn9hCY})Gb($+xr`yc00CW 
zNA`h6m%We3ihhP(Z^J9q?5eqvJU=}DR5oLgefttkJSu^?Ms=M>R^_bX?2795{|OJT zCK&%k#l>y;9+lG$*>4V+T8h}wCZ*|53&{Z1MN+{xa9PBwYj%*R#g3d6S@%URT$orL zI9J6Z#K}b~vg05mZpq;6SgP~5`jZw3*LW7|KZJMz}6@_u)UJ1V&1V0 z?wrXC3q*EzEV7`AhEyy=mnJTq3>bZImFs={aUvn*?7g<+W_b_pk>j1T5h zMhma7q6Kixvq7-%OLF8C7$QtL`s8Pgq;kdS;gqrwWbj#JvN>neA>B@J@(#x@8=ry;r2{aphdF2@e@gebeC1ospf z5Yn|Hnn}q^cU5zW7gVZ5*6B { if (searchResponse == null @@ -386,7 +384,7 @@ private static List unrollAgg( count = getAggCount(agg, rolled.getAsMap()); } - return unrollAgg((InternalAggregation) agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); + return unrollAgg(agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); }).collect(Collectors.toList()); } @@ -580,7 +578,7 @@ private static InternalAggregations unrollSubAggsFromMulti(InternalBucket bucket currentSubAgg = currentTree.getAggregations().get(subAgg.getName()); } - return unrollAgg((InternalAggregation) subAgg, originalSubAgg, currentSubAgg, count); + return unrollAgg(subAgg, originalSubAgg, currentSubAgg, count); }) .collect(Collectors.toList()) ); @@ -619,7 +617,7 @@ private static InternalAggregation unrollMetric(SingleValue metric, long count) } } - private static long getAggCount(Aggregation agg, Map aggMap) { + private static long getAggCount(Aggregation agg, Map aggMap) { String countPath = null; if (agg.getType().equals(DateHistogramAggregationBuilder.NAME) diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index 095eb141bb39d..2b995b0e56da0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.util.Maps; 
-import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -62,7 +62,7 @@ static Stream processBuckets( // Put the composite keys into a treemap so that the key iteration order is consistent // TODO would be nice to avoid allocating this treemap in the future TreeMap keys = new TreeMap<>(b.getKey()); - List metrics = b.getAggregations().asList(); + List metrics = b.getAggregations().asList(); RollupIDGenerator idGenerator = new RollupIDGenerator(jobId); Map doc = Maps.newMapWithExpectedSize(keys.size() + metrics.size()); @@ -124,7 +124,7 @@ private static void processKeys( }); } - private static void processMetrics(List metrics, Map doc) { + private static void processMetrics(List metrics, Map doc) { List emptyCounts = new ArrayList<>(); metrics.forEach(m -> { if (m instanceof InternalNumericMetricsAggregation.SingleValue) { diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 3aeca660e06b3..ae0949f5bedfa 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -46,7 +46,6 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; import 
org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -230,7 +229,7 @@ public void testRolledMissingAggs() throws Exception { ); try { assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); + InternalAggregations responseAggs = response.getAggregations(); assertThat(responseAggs.asList().size(), equalTo(0)); } finally { // this SearchResponse is not a mock, so must be decRef'd @@ -311,7 +310,7 @@ public void testTranslateRollup() throws Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggs = InternalAggregations.from(aggTree); + InternalAggregations mockAggs = InternalAggregations.from(aggTree); when(response.getAggregations()).thenReturn(mockAggs); MultiSearchResponse multiSearchResponse = new MultiSearchResponse( new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(response, null) }, @@ -325,7 +324,7 @@ public void testTranslateRollup() throws Exception { ); try { assertNotNull(finalResponse); - Aggregations responseAggs = finalResponse.getAggregations(); + InternalAggregations responseAggs = finalResponse.getAggregations(); assertNotNull(finalResponse); Avg avg = responseAggs.get("foo"); assertThat(avg.getValue(), equalTo(5.0)); @@ -365,7 +364,7 @@ public void testMissingFilter() { Max protoMax = mock(Max.class); when(protoMax.getName()).thenReturn("foo"); protoAggTree.add(protoMax); - Aggregations protoMockAggs = InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -374,7 +373,7 @@ public void testMissingFilter() { Max max = mock(Max.class); when(max.getName()).thenReturn("bizzbuzz"); aggTreeWithoutFilter.add(max); - Aggregations mockAggsWithout = 
InternalAggregations.from(aggTreeWithoutFilter); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); @@ -396,7 +395,7 @@ public void testMatchingNameNotFilter() { Max protoMax = mock(Max.class); when(protoMax.getName()).thenReturn("foo"); protoAggTree.add(protoMax); - Aggregations protoMockAggs = InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -404,7 +403,7 @@ public void testMatchingNameNotFilter() { List aggTreeWithoutFilter = new ArrayList<>(1); Max max = new Max("filter_foo", 0, DocValueFormat.RAW, null); aggTreeWithoutFilter.add(max); - Aggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTreeWithoutFilter); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); @@ -426,7 +425,7 @@ public void testSimpleReduction() throws Exception { List protoAggTree = new ArrayList<>(1); InternalAvg internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, null); protoAggTree.add(internalAvg); - Aggregations protoMockAggs = InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -474,7 +473,7 @@ public void testSimpleReduction() throws Exception { ); try { assertNotNull(response); - Aggregations responseAggs = 
response.getAggregations(); + InternalAggregations responseAggs = response.getAggregations(); assertNotNull(responseAggs); Avg avg = responseAggs.get("foo"); assertThat(avg.getValue(), equalTo(5.0)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index 32b9c2df962a9..7971695ecabc1 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -737,7 +736,7 @@ public void testRollupOnly() throws Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggs = InternalAggregations.from(aggTree); + InternalAggregations mockAggs = InternalAggregations.from(aggTree); when(response.getAggregations()).thenReturn(mockAggs); MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null); MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[] { item }, 1); @@ -749,7 +748,7 @@ public void testRollupOnly() throws Exception { ); try { assertNotNull(r); - Aggregations responseAggs = r.getAggregations(); + InternalAggregations responseAggs = r.getAggregations(); Avg avg = responseAggs.get("foo"); assertThat(avg.getValue(), IsEqual.equalTo(5.0)); } finally { @@ -844,7 +843,7 @@ public void testBoth() throws Exception { List protoAggTree = new ArrayList<>(1); InternalAvg 
internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, null); protoAggTree.add(internalAvg); - Aggregations protoMockAggs = InternalAggregations.from(protoAggTree); + InternalAggregations protoMockAggs = InternalAggregations.from(protoAggTree); when(protoResponse.getAggregations()).thenReturn(protoMockAggs); MultiSearchResponse.Item unrolledResponse = new MultiSearchResponse.Item(protoResponse, null); @@ -874,7 +873,7 @@ public void testBoth() throws Exception { when(filter.getName()).thenReturn("filter_foo"); aggTree.add(filter); - Aggregations mockAggsWithout = InternalAggregations.from(aggTree); + InternalAggregations mockAggsWithout = InternalAggregations.from(aggTree); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); @@ -893,7 +892,7 @@ public void testBoth() throws Exception { ); try { assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); + InternalAggregations responseAggs = response.getAggregations(); assertNotNull(responseAggs); Avg avg = responseAggs.get("foo"); assertThat(avg.getValue(), IsEqual.equalTo(5.0)); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index ce9db5015a0da..b6893e853f256 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -138,6 +138,7 @@ public class Constants { "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/connector/update_service_type", + 
"cluster:admin/xpack/connector/secret/delete", "cluster:admin/xpack/connector/secret/get", "cluster:admin/xpack/connector/secret/post", "cluster:admin/xpack/connector/sync_job/cancel", diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java index e552befc267c8..e9c640236ceb5 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryApiKeyIT.java @@ -63,10 +63,21 @@ public void testQuery() throws IOException { apiKeys.forEach(k -> assertThat(k, not(hasKey("_sort")))); }); + assertQuery(API_KEY_ADMIN_AUTH_HEADER, """ + { "query": { "match": {"name": {"query": "my-ingest-key-1 my-org/alert-key-1", "analyzer": "whitespace"} } } }""", apiKeys -> { + assertThat(apiKeys.size(), equalTo(2)); + assertThat(apiKeys.get(0).get("name"), oneOf("my-ingest-key-1", "my-org/alert-key-1")); + assertThat(apiKeys.get(1).get("name"), oneOf("my-ingest-key-1", "my-org/alert-key-1")); + apiKeys.forEach(k -> assertThat(k, not(hasKey("_sort")))); + }); + // An empty request body means search for all keys assertQuery(API_KEY_ADMIN_AUTH_HEADER, randomBoolean() ? "" : """ {"query":{"match_all":{}}}""", apiKeys -> assertThat(apiKeys.size(), equalTo(6))); + assertQuery(API_KEY_ADMIN_AUTH_HEADER, randomBoolean() ? 
"" : """ + { "query": { "match": {"type": "rest"} } }""", apiKeys -> assertThat(apiKeys.size(), equalTo(6))); + assertQuery( API_KEY_ADMIN_AUTH_HEADER, """ diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 3833a6466c67c..49c2da7b173ec 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -778,6 +778,58 @@ public void testQueryCrossClusterApiKeysByType() throws IOException { assertThat(queryResponse.evaluate("api_keys.0.name"), is("test-cross-key-query-2")); } + public void testSortApiKeysByType() throws IOException { + List apiKeyIds = new ArrayList<>(2); + // create regular api key + EncodedApiKey encodedApiKey = createApiKey("test-rest-key", Map.of("tag", "rest")); + apiKeyIds.add(encodedApiKey.id()); + // create cross-cluster key + Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); + createRequest.setJsonEntity(""" + { + "name": "test-cross-key", + "access": { + "search": [ + { + "names": [ "whatever" ] + } + ] + }, + "metadata": { "tag": "cross" } + }"""); + setUserForRequest(createRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + ObjectPath createResponse = assertOKAndCreateObjectPath(client().performRequest(createRequest)); + apiKeyIds.add(createResponse.evaluate("id")); + + // desc sort all (2) keys - by type + Request queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(""" + {"sort":[{"type":{"order":"desc"}}]}"""); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); 
+ ObjectPath queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(2)); + assertThat(queryResponse.evaluate("count"), is(2)); + assertThat(queryResponse.evaluate("api_keys.0.id"), is(apiKeyIds.get(0))); + assertThat(queryResponse.evaluate("api_keys.0.type"), is("rest")); + assertThat(queryResponse.evaluate("api_keys.1.id"), is(apiKeyIds.get(1))); + assertThat(queryResponse.evaluate("api_keys.1.type"), is("cross_cluster")); + + // asc sort all (2) keys - by type + queryRequest = new Request("GET", "/_security/_query/api_key"); + queryRequest.addParameter("with_limited_by", String.valueOf(randomBoolean())); + queryRequest.setJsonEntity(""" + {"sort":[{"type":{"order":"asc"}}]}"""); + setUserForRequest(queryRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + queryResponse = assertOKAndCreateObjectPath(client().performRequest(queryRequest)); + assertThat(queryResponse.evaluate("total"), is(2)); + assertThat(queryResponse.evaluate("count"), is(2)); + assertThat(queryResponse.evaluate("api_keys.0.id"), is(apiKeyIds.get(1))); + assertThat(queryResponse.evaluate("api_keys.0.type"), is("cross_cluster")); + assertThat(queryResponse.evaluate("api_keys.1.id"), is(apiKeyIds.get(0))); + assertThat(queryResponse.evaluate("api_keys.1.type"), is("rest")); + } + public void testCreateCrossClusterApiKey() throws IOException { final Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); createRequest.setJsonEntity(""" diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java index 9d25802544d38..b1f73251cdb47 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java @@ -28,6 +28,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; @@ -81,22 +82,25 @@ protected void doExecute(Task task, QueryApiKeyRequest request, ActionListener { + searchSourceBuilder.query(ApiKeyBoolQueryBuilder.build(request.getQueryBuilder(), fieldName -> { if (API_KEY_TYPE_RUNTIME_MAPPING_FIELD.equals(fieldName)) { accessesApiKeyTypeField.set(true); } - }, request.isFilterForCurrentUser() ? authentication : null); - searchSourceBuilder.query(apiKeyBoolQueryBuilder); + }, request.isFilterForCurrentUser() ? authentication : null)); + + if (request.getFieldSortBuilders() != null) { + translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder, fieldName -> { + if (API_KEY_TYPE_RUNTIME_MAPPING_FIELD.equals(fieldName)) { + accessesApiKeyTypeField.set(true); + } + }); + } // only add the query-level runtime field to the search request if it's actually referring the "type" field if (accessesApiKeyTypeField.get()) { searchSourceBuilder.runtimeMappings(API_KEY_TYPE_RUNTIME_MAPPING); } - if (request.getFieldSortBuilders() != null) { - translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder); - } - if (request.getSearchAfterBuilder() != null) { searchSourceBuilder.searchAfter(request.getSearchAfterBuilder().getSortValues()); } @@ -106,7 +110,11 @@ protected void doExecute(Task task, QueryApiKeyRequest request, ActionListener fieldSortBuilders, SearchSourceBuilder searchSourceBuilder) { + static void translateFieldSortBuilders( + List fieldSortBuilders, + SearchSourceBuilder searchSourceBuilder, + Consumer fieldNameVisitor + ) { fieldSortBuilders.forEach(fieldSortBuilder -> { if (fieldSortBuilder.getNestedSort() != null) { throw new 
IllegalArgumentException("nested sorting is not supported for API Key query"); @@ -115,6 +123,7 @@ static void translateFieldSortBuilders(List fieldSortBuilders, searchSourceBuilder.sort(fieldSortBuilder); } else { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(fieldSortBuilder.getFieldName()); + fieldNameVisitor.accept(translatedFieldName); if (translatedFieldName.equals(fieldSortBuilder.getFieldName())) { searchSourceBuilder.sort(fieldSortBuilder); } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java index 9f7b84e4a2698..651427d07e651 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java @@ -14,6 +14,7 @@ import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -111,7 +112,8 @@ private static QueryBuilder doProcess(QueryBuilder qb, Consumer fieldNam if (qb instanceof final BoolQueryBuilder query) { final BoolQueryBuilder newQuery = QueryBuilders.boolQuery() .minimumShouldMatch(query.minimumShouldMatch()) - .adjustPureNegative(query.adjustPureNegative()); + .adjustPureNegative(query.adjustPureNegative()) + .boost(query.boost()); query.must().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::must); query.should().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::should); 
query.mustNot().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::mustNot); @@ -124,28 +126,63 @@ private static QueryBuilder doProcess(QueryBuilder qb, Consumer fieldNam } else if (qb instanceof final TermQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.termQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); + return QueryBuilders.termQuery(translatedFieldName, query.value()) + .caseInsensitive(query.caseInsensitive()) + .boost(query.boost()); } else if (qb instanceof final ExistsQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.existsQuery(translatedFieldName); + return QueryBuilders.existsQuery(translatedFieldName).boost(query.boost()); } else if (qb instanceof final TermsQueryBuilder query) { if (query.termsLookup() != null) { throw new IllegalArgumentException("terms query with terms lookup is not supported for API Key query"); } final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.termsQuery(translatedFieldName, query.getValues()); + return QueryBuilders.termsQuery(translatedFieldName, query.getValues()).boost(query.boost()); } else if (qb instanceof final PrefixQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.prefixQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); + return QueryBuilders.prefixQuery(translatedFieldName, query.value()) + .caseInsensitive(query.caseInsensitive()) + .rewrite(query.rewrite()) + .boost(query.boost()); } else if (qb instanceof 
final WildcardQueryBuilder query) { final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); fieldNameVisitor.accept(translatedFieldName); return QueryBuilders.wildcardQuery(translatedFieldName, query.value()) .caseInsensitive(query.caseInsensitive()) - .rewrite(query.rewrite()); + .rewrite(query.rewrite()) + .boost(query.boost()); + } else if (qb instanceof final MatchQueryBuilder query) { + final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + final MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(translatedFieldName, query.value()); + if (query.operator() != null) { + matchQueryBuilder.operator(query.operator()); + } + if (query.analyzer() != null) { + matchQueryBuilder.analyzer(query.analyzer()); + } + if (query.fuzziness() != null) { + matchQueryBuilder.fuzziness(query.fuzziness()); + } + if (query.minimumShouldMatch() != null) { + matchQueryBuilder.minimumShouldMatch(query.minimumShouldMatch()); + } + if (query.fuzzyRewrite() != null) { + matchQueryBuilder.fuzzyRewrite(query.fuzzyRewrite()); + } + if (query.zeroTermsQuery() != null) { + matchQueryBuilder.zeroTermsQuery(query.zeroTermsQuery()); + } + matchQueryBuilder.prefixLength(query.prefixLength()) + .maxExpansions(query.maxExpansions()) + .fuzzyTranspositions(query.fuzzyTranspositions()) + .lenient(query.lenient()) + .autoGenerateSynonymsPhraseQuery(query.autoGenerateSynonymsPhraseQuery()) + .boost(query.boost()); + return matchQueryBuilder; } else if (qb instanceof final RangeQueryBuilder query) { if (query.relation() != null) { throw new IllegalArgumentException("range query with relation is not supported for API Key query"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java index d1a0b5d7ca95c..4a2f707d3e902 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java @@ -14,15 +14,18 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.stream.IntStream; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class TransportQueryApiKeyActionTests extends ESTestCase { public void testTranslateFieldSortBuilders() { + final String metadataField = randomAlphaOfLengthBetween(3, 8); final List fieldNames = List.of( "_doc", "username", @@ -30,14 +33,16 @@ public void testTranslateFieldSortBuilders() { "name", "creation", "expiration", + "type", "invalidated", - "metadata." + randomAlphaOfLengthBetween(3, 8) + "metadata." + metadataField ); final List originals = fieldNames.stream().map(this::randomFieldSortBuilderWithName).toList(); + List sortFields = new ArrayList<>(); final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); - TransportQueryApiKeyAction.translateFieldSortBuilders(originals, searchSourceBuilder); + TransportQueryApiKeyAction.translateFieldSortBuilders(originals, searchSourceBuilder, sortFields::add); IntStream.range(0, originals.size()).forEach(i -> { final FieldSortBuilder original = originals.get(i); @@ -57,6 +62,8 @@ public void testTranslateFieldSortBuilders() { assertThat(translated.getFieldName(), equalTo("api_key_invalidated")); } else if (original.getFieldName().startsWith("metadata.")) { assertThat(translated.getFieldName(), equalTo("metadata_flattened." 
+ original.getFieldName().substring(9))); + } else if ("type".equals(original.getFieldName())) { + assertThat(translated.getFieldName(), equalTo("runtime_key_type")); } else { fail("unrecognized field name: [" + original.getFieldName() + "]"); } @@ -68,6 +75,19 @@ public void testTranslateFieldSortBuilders() { assertThat(translated.sortMode(), equalTo(original.sortMode())); } }); + assertThat( + sortFields, + containsInAnyOrder( + "creator.principal", + "creator.realm", + "name", + "creation_time", + "expiration_time", + "runtime_key_type", + "api_key_invalidated", + "metadata_flattened." + metadataField + ) + ); } public void testNestedSortingIsNotAllowed() { @@ -75,7 +95,11 @@ public void testNestedSortingIsNotAllowed() { fieldSortBuilder.setNestedSort(new NestedSortBuilder("name")); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> TransportQueryApiKeyAction.translateFieldSortBuilders(List.of(fieldSortBuilder), SearchSourceBuilder.searchSource()) + () -> TransportQueryApiKeyAction.translateFieldSortBuilders( + List.of(fieldSortBuilder), + SearchSourceBuilder.searchSource(), + ignored -> {} + ) ); assertThat(e.getMessage(), equalTo("nested sorting is not supported for API Key query")); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index 46fde61690017..61646f5ff375b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -345,6 +345,7 @@ public void testElasticEnterpriseSearchServerAccount() { assertThat(role.cluster().check(ILMActions.PUT.name(), request, authentication), is(true)); // Connector secrets. 
Enterprise Search has read and write access. + assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/delete", request, authentication), is(true)); assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/get", request, authentication), is(true)); assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/post", request, authentication), is(true)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java index 4064d9f0ce4da..44b81b96e2154 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java @@ -7,13 +7,16 @@ package org.elasticsearch.xpack.security.support; +import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.DistanceFeatureQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MultiTermQueryBuilder; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -24,10 +27,12 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.index.query.ZeroTermsQueryOption; import org.elasticsearch.indices.TermsLookup; 
import org.elasticsearch.script.Script; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationTests; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.user.User; @@ -47,11 +52,13 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -60,17 +67,144 @@ public class ApiKeyBoolQueryBuilderTests extends ESTestCase { public void testBuildFromSimpleQuery() { - final Authentication authentication = randomBoolean() ? 
AuthenticationTests.randomAuthentication(null, null) : null; - final QueryBuilder q1 = randomSimpleQuery("name"); - final List queryFields = new ArrayList<>(); - final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, queryFields::add, authentication); - assertQueryFields(queryFields, q1, authentication); - assertCommonFilterQueries(apiKeyQb1, authentication); - final List mustQueries = apiKeyQb1.must(); + { + QueryBuilder qb = randomSimpleQuery("name"); + List queryFields = new ArrayList<>(); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(qb, queryFields::add, null); + assertQueryFields(queryFields, qb, null); + assertCommonFilterQueries(apiKeyQb, null); + List mustQueries = apiKeyQb.must(); + assertThat(mustQueries, hasSize(1)); + assertThat(mustQueries.get(0), equalTo(qb)); + assertThat(apiKeyQb.should(), emptyIterable()); + assertThat(apiKeyQb.mustNot(), emptyIterable()); + } + { + Authentication authentication = AuthenticationTests.randomAuthentication(null, null); + QueryBuilder qb = randomSimpleQuery("name"); + List queryFields = new ArrayList<>(); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(qb, queryFields::add, authentication); + assertQueryFields(queryFields, qb, authentication); + assertCommonFilterQueries(apiKeyQb, authentication); + List mustQueries = apiKeyQb.must(); + assertThat(mustQueries, hasSize(1)); + assertThat(mustQueries.get(0), equalTo(qb)); + assertThat(apiKeyQb.should(), emptyIterable()); + assertThat(apiKeyQb.mustNot(), emptyIterable()); + } + { + String apiKeyId = randomUUID(); + Authentication authentication = AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), apiKeyId); + QueryBuilder qb = randomSimpleQuery("name"); + List queryFields = new ArrayList<>(); + ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(qb, queryFields::add, authentication); + assertQueryFields(queryFields, qb, authentication); + assertCommonFilterQueries(apiKeyQb, 
authentication); + List mustQueries = apiKeyQb.must(); + assertThat(mustQueries, hasSize(1)); + assertThat(mustQueries.get(0), equalTo(qb)); + assertThat(apiKeyQb.should(), emptyIterable()); + assertThat(apiKeyQb.mustNot(), emptyIterable()); + } + } + + public void testPrefixQueryBuilderPropertiesArePreserved() { + Authentication authentication = randomFrom( + AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), randomUUID()), + AuthenticationTests.randomAuthentication(null, null), + null + ); + String fieldName = randomValidFieldName(); + PrefixQueryBuilder prefixQueryBuilder = QueryBuilders.prefixQuery(fieldName, randomAlphaOfLengthBetween(0, 4)); + if (randomBoolean()) { + prefixQueryBuilder.boost(Math.abs(randomFloat())); + } + if (randomBoolean()) { + prefixQueryBuilder.caseInsensitive(randomBoolean()); + } + if (randomBoolean()) { + prefixQueryBuilder.rewrite(randomAlphaOfLengthBetween(0, 4)); + } + List queryFields = new ArrayList<>(); + ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build(prefixQueryBuilder, queryFields::add, authentication); + assertThat(queryFields, hasItem(ApiKeyFieldNameTranslators.translate(fieldName))); + List mustQueries = apiKeyMatchQueryBuilder.must(); assertThat(mustQueries, hasSize(1)); - assertThat(mustQueries.get(0), equalTo(q1)); - assertTrue(apiKeyQb1.should().isEmpty()); - assertTrue(apiKeyQb1.mustNot().isEmpty()); + assertThat(mustQueries.get(0), instanceOf(PrefixQueryBuilder.class)); + PrefixQueryBuilder prefixQueryBuilder2 = (PrefixQueryBuilder) mustQueries.get(0); + assertThat(prefixQueryBuilder2.fieldName(), is(ApiKeyFieldNameTranslators.translate(prefixQueryBuilder.fieldName()))); + assertThat(prefixQueryBuilder2.value(), is(prefixQueryBuilder.value())); + assertThat(prefixQueryBuilder2.boost(), is(prefixQueryBuilder.boost())); + assertThat(prefixQueryBuilder2.caseInsensitive(), is(prefixQueryBuilder.caseInsensitive())); + assertThat(prefixQueryBuilder2.rewrite(), 
is(prefixQueryBuilder.rewrite())); + } + + public void testMatchQueryBuilderPropertiesArePreserved() { + // the match query has many properties, that all must be preserved after limiting for API Key docs only + Authentication authentication = randomFrom( + AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), randomUUID()), + AuthenticationTests.randomAuthentication(null, null), + null + ); + String fieldName = randomValidFieldName(); + MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(fieldName, new Object()); + if (randomBoolean()) { + matchQueryBuilder.boost(Math.abs(randomFloat())); + } + if (randomBoolean()) { + matchQueryBuilder.operator(randomFrom(Operator.OR, Operator.AND)); + } + if (randomBoolean()) { + matchQueryBuilder.analyzer(randomAlphaOfLength(4)); + } + if (randomBoolean()) { + matchQueryBuilder.fuzziness(randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO, Fuzziness.AUTO)); + } + if (randomBoolean()) { + matchQueryBuilder.minimumShouldMatch(randomAlphaOfLength(4)); + } + if (randomBoolean()) { + matchQueryBuilder.fuzzyRewrite(randomAlphaOfLength(4)); + } + if (randomBoolean()) { + matchQueryBuilder.zeroTermsQuery(randomFrom(ZeroTermsQueryOption.NONE, ZeroTermsQueryOption.ALL, ZeroTermsQueryOption.NULL)); + } + if (randomBoolean()) { + matchQueryBuilder.prefixLength(randomNonNegativeInt()); + } + if (randomBoolean()) { + matchQueryBuilder.maxExpansions(randomIntBetween(1, 100)); + } + if (randomBoolean()) { + matchQueryBuilder.fuzzyTranspositions(randomBoolean()); + } + if (randomBoolean()) { + matchQueryBuilder.lenient(randomBoolean()); + } + if (randomBoolean()) { + matchQueryBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean()); + } + List queryFields = new ArrayList<>(); + ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build(matchQueryBuilder, queryFields::add, authentication); + assertThat(queryFields, hasItem(ApiKeyFieldNameTranslators.translate(fieldName))); + List 
mustQueries = apiKeyMatchQueryBuilder.must(); + assertThat(mustQueries, hasSize(1)); + assertThat(mustQueries.get(0), instanceOf(MatchQueryBuilder.class)); + MatchQueryBuilder matchQueryBuilder2 = (MatchQueryBuilder) mustQueries.get(0); + assertThat(matchQueryBuilder2.fieldName(), is(ApiKeyFieldNameTranslators.translate(matchQueryBuilder.fieldName()))); + assertThat(matchQueryBuilder2.value(), is(matchQueryBuilder.value())); + assertThat(matchQueryBuilder2.operator(), is(matchQueryBuilder.operator())); + assertThat(matchQueryBuilder2.analyzer(), is(matchQueryBuilder.analyzer())); + assertThat(matchQueryBuilder2.fuzziness(), is(matchQueryBuilder.fuzziness())); + assertThat(matchQueryBuilder2.minimumShouldMatch(), is(matchQueryBuilder.minimumShouldMatch())); + assertThat(matchQueryBuilder2.fuzzyRewrite(), is(matchQueryBuilder.fuzzyRewrite())); + assertThat(matchQueryBuilder2.zeroTermsQuery(), is(matchQueryBuilder.zeroTermsQuery())); + assertThat(matchQueryBuilder2.prefixLength(), is(matchQueryBuilder.prefixLength())); + assertThat(matchQueryBuilder2.maxExpansions(), is(matchQueryBuilder.maxExpansions())); + assertThat(matchQueryBuilder2.fuzzyTranspositions(), is(matchQueryBuilder.fuzzyTranspositions())); + assertThat(matchQueryBuilder2.lenient(), is(matchQueryBuilder.lenient())); + assertThat(matchQueryBuilder2.autoGenerateSynonymsPhraseQuery(), is(matchQueryBuilder.autoGenerateSynonymsPhraseQuery())); + assertThat(matchQueryBuilder2.boost(), is(matchQueryBuilder.boost())); } public void testQueryForDomainAuthentication() { @@ -405,7 +539,6 @@ public void testDisallowedQueryTypes() { final Authentication authentication = randomBoolean() ? 
AuthenticationTests.randomAuthentication(null, null) : null; final AbstractQueryBuilder> q1 = randomFrom( - QueryBuilders.matchQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), QueryBuilders.constantScoreQuery(mock(QueryBuilder.class)), QueryBuilders.boostingQuery(mock(QueryBuilder.class), mock(QueryBuilder.class)), QueryBuilders.queryStringQuery("q=a:42"), @@ -760,20 +893,38 @@ private void assertCommonFilterQueries(ApiKeyBoolQueryBuilder qb, Authentication if (authentication == null) { return; } - assertTrue( - tqb.stream() - .anyMatch( - q -> q.equals(QueryBuilders.termQuery("creator.principal", authentication.getEffectiveSubject().getUser().principal())) + if (authentication.isApiKey()) { + List idsQueryBuilders = qb.filter() + .stream() + .filter(q -> q.getClass() == IdsQueryBuilder.class) + .map(q -> (IdsQueryBuilder) q) + .toList(); + assertThat(idsQueryBuilders, iterableWithSize(1)); + assertThat( + idsQueryBuilders.get(0), + equalTo( + QueryBuilders.idsQuery() + .addIds((String) authentication.getAuthenticatingSubject().getMetadata().get(AuthenticationField.API_KEY_ID_KEY)) ) - ); - assertTrue( - tqb.stream() - .anyMatch(q -> q.equals(QueryBuilders.termQuery("creator.realm", ApiKeyService.getCreatorRealmName(authentication)))) - ); + ); + } else { + assertTrue( + tqb.stream() + .anyMatch( + q -> q.equals( + QueryBuilders.termQuery("creator.principal", authentication.getEffectiveSubject().getUser().principal()) + ) + ) + ); + assertTrue( + tqb.stream() + .anyMatch(q -> q.equals(QueryBuilders.termQuery("creator.realm", ApiKeyService.getCreatorRealmName(authentication)))) + ); + } } private QueryBuilder randomSimpleQuery(String fieldName) { - return switch (randomIntBetween(0, 8)) { + return switch (randomIntBetween(0, 9)) { case 0 -> QueryBuilders.termQuery(fieldName, randomAlphaOfLengthBetween(3, 8)); case 1 -> QueryBuilders.termsQuery(fieldName, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))); case 2 -> 
QueryBuilders.idsQuery().addIds(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(22))); @@ -788,6 +939,11 @@ private QueryBuilder randomSimpleQuery(String fieldName) { .field(fieldName) .lenient(randomBoolean()) .analyzeWildcard(randomBoolean()); + case 9 -> QueryBuilders.matchQuery(fieldName, randomAlphaOfLengthBetween(3, 8)) + .operator(randomFrom(Operator.OR, Operator.AND)) + .lenient(randomBoolean()) + .maxExpansions(randomIntBetween(1, 100)) + .analyzer(randomFrom(randomAlphaOfLength(4), null)); default -> throw new IllegalStateException("illegal switch case"); }; } @@ -802,4 +958,19 @@ private void assertQueryFields(List actualQueryFields, QueryBuilder quer assertThat(actualQueryFields, hasItem("creator.realm")); } } + + private static String randomValidFieldName() { + return randomFrom( + "username", + "realm_name", + "name", + "type", + "creation", + "expiration", + "invalidated", + "invalidation", + "metadata", + "metadata.what.ever" + ); + } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/ShapeUtils.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/ShapeUtils.java deleted file mode 100644 index 289fbe6e707ca..0000000000000 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/ShapeUtils.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.spatial.common; - -import org.elasticsearch.geometry.Circle; -import org.elasticsearch.geometry.Line; -import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.Polygon; -import org.elasticsearch.geometry.Rectangle; - -/** - * Utility class that transforms Elasticsearch geometry objects to the Lucene representation - */ -public class ShapeUtils { - // no instance: - private ShapeUtils() {} - - public static org.apache.lucene.geo.XYPolygon toLuceneXYPolygon(Polygon polygon) { - org.apache.lucene.geo.XYPolygon[] holes = new org.apache.lucene.geo.XYPolygon[polygon.getNumberOfHoles()]; - for (int i = 0; i < holes.length; i++) { - holes[i] = new org.apache.lucene.geo.XYPolygon( - doubleArrayToFloatArray(polygon.getHole(i).getX()), - doubleArrayToFloatArray(polygon.getHole(i).getY()) - ); - } - return new org.apache.lucene.geo.XYPolygon( - doubleArrayToFloatArray(polygon.getPolygon().getX()), - doubleArrayToFloatArray(polygon.getPolygon().getY()), - holes - ); - } - - public static org.apache.lucene.geo.XYPolygon toLuceneXYPolygon(Rectangle r) { - return new org.apache.lucene.geo.XYPolygon( - new float[] { (float) r.getMinX(), (float) r.getMaxX(), (float) r.getMaxX(), (float) r.getMinX(), (float) r.getMinX() }, - new float[] { (float) r.getMinY(), (float) r.getMinY(), (float) r.getMaxY(), (float) r.getMaxY(), (float) r.getMinY() } - ); - } - - public static org.apache.lucene.geo.XYRectangle toLuceneXYRectangle(Rectangle r) { - return new org.apache.lucene.geo.XYRectangle((float) r.getMinX(), (float) r.getMaxX(), (float) r.getMinY(), (float) r.getMaxY()); - } - - public static org.apache.lucene.geo.XYPoint toLuceneXYPoint(Point point) { - return new org.apache.lucene.geo.XYPoint((float) point.getX(), (float) point.getY()); - } - - public static org.apache.lucene.geo.XYLine toLuceneXYLine(Line line) { - return new org.apache.lucene.geo.XYLine(doubleArrayToFloatArray(line.getX()), doubleArrayToFloatArray(line.getY())); 
- } - - public static org.apache.lucene.geo.XYCircle toLuceneXYCircle(Circle circle) { - return new org.apache.lucene.geo.XYCircle((float) circle.getX(), (float) circle.getY(), (float) circle.getRadiusMeters()); - } - - private static float[] doubleArrayToFloatArray(double[] array) { - float[] result = new float[array.length]; - for (int i = 0; i < array.length; ++i) { - result[i] = (float) array[i]; - } - return result; - } - -} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeIndexer.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeIndexer.java index b8e665c0c768a..c23d63baa5791 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeIndexer.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeIndexer.java @@ -8,6 +8,7 @@ import org.apache.lucene.document.XYShape; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; @@ -21,7 +22,6 @@ import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.index.mapper.ShapeIndexer; -import org.elasticsearch.xpack.spatial.common.ShapeUtils; import java.util.ArrayList; import java.util.Arrays; @@ -70,7 +70,7 @@ public Void visit(GeometryCollection collection) { @Override public Void visit(Line line) { - addFields(XYShape.createIndexableFields(name, ShapeUtils.toLuceneXYLine(line))); + addFields(XYShape.createIndexableFields(name, LuceneGeometriesUtils.toXYLine(line))); return null; } @@ -111,13 +111,13 @@ public Void visit(Point point) { @Override public Void visit(Polygon polygon) { - addFields(XYShape.createIndexableFields(name, ShapeUtils.toLuceneXYPolygon(polygon), 
true)); + addFields(XYShape.createIndexableFields(name, LuceneGeometriesUtils.toXYPolygon(polygon), true)); return null; } @Override public Void visit(Rectangle r) { - addFields(XYShape.createIndexableFields(name, ShapeUtils.toLuceneXYPolygon(r))); + addFields(XYShape.createIndexableFields(name, toLuceneXYPolygon(r))); return null; } @@ -125,4 +125,11 @@ private void addFields(IndexableField[] fields) { this.fields.addAll(Arrays.asList(fields)); } } + + private static org.apache.lucene.geo.XYPolygon toLuceneXYPolygon(Rectangle r) { + return new org.apache.lucene.geo.XYPolygon( + new float[] { (float) r.getMinX(), (float) r.getMaxX(), (float) r.getMaxX(), (float) r.getMinX(), (float) r.getMinX() }, + new float[] { (float) r.getMinY(), (float) r.getMinY(), (float) r.getMaxY(), (float) r.getMaxY(), (float) r.getMinY() } + ); + } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryPointProcessor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryPointProcessor.java index d455d0f539cfa..a8c084e7e0f01 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryPointProcessor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryPointProcessor.java @@ -8,44 +8,43 @@ import org.apache.lucene.document.XYDocValuesField; import org.apache.lucene.document.XYPointField; -import org.apache.lucene.geo.XYCircle; -import org.apache.lucene.geo.XYRectangle; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.geo.XYGeometry; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; -import 
org.elasticsearch.geometry.GeometryCollection; -import org.elasticsearch.geometry.GeometryVisitor; -import org.elasticsearch.geometry.LinearRing; -import org.elasticsearch.geometry.MultiLine; -import org.elasticsearch.geometry.MultiPoint; -import org.elasticsearch.geometry.MultiPolygon; -import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.Polygon; -import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.xpack.spatial.common.ShapeUtils; import org.elasticsearch.xpack.spatial.index.mapper.PointFieldMapper; +import java.util.function.Consumer; + public class ShapeQueryPointProcessor { - public Query shapeQuery(Geometry shape, String fieldName, ShapeRelation relation, SearchExecutionContext context) { - validateIsPointFieldType(fieldName, context); + public Query shapeQuery(Geometry geometry, String fieldName, ShapeRelation relation, SearchExecutionContext context) { + final boolean hasDocValues = validateIsPointFieldType(fieldName, context); // only the intersects relation is supported for indexed cartesian point types if (relation != ShapeRelation.INTERSECTS) { throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); } - // wrap XYPoint query as a ConstantScoreQuery - return getVectorQueryFromShape(shape, fieldName, relation, context); + final Consumer checker = t -> { + if (t == ShapeType.POINT || t == ShapeType.MULTIPOINT || t == ShapeType.LINESTRING || t == ShapeType.MULTILINESTRING) { + throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + t + " queries"); + } + }; + final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, checker); + Query query = XYPointField.newGeometryQuery(fieldName, 
luceneGeometries); + if (hasDocValues) { + final Query queryDocValues = XYDocValuesField.newSlowGeometryQuery(fieldName, luceneGeometries); + query = new IndexOrDocValuesQuery(query, queryDocValues); + } + return query; } - private void validateIsPointFieldType(String fieldName, SearchExecutionContext context) { + private boolean validateIsPointFieldType(String fieldName, SearchExecutionContext context) { MappedFieldType fieldType = context.getFieldType(fieldName); if (fieldType instanceof PointFieldMapper.PointFieldType == false) { throw new QueryShardException( @@ -53,118 +52,6 @@ private void validateIsPointFieldType(String fieldName, SearchExecutionContext c "Expected " + PointFieldMapper.CONTENT_TYPE + " field type for Field [" + fieldName + "] but found " + fieldType.typeName() ); } - } - - protected Query getVectorQueryFromShape(Geometry queryShape, String fieldName, ShapeRelation relation, SearchExecutionContext context) { - ShapeVisitor shapeVisitor = new ShapeVisitor(context, fieldName, relation); - return queryShape.visit(shapeVisitor); - } - - private class ShapeVisitor implements GeometryVisitor { - SearchExecutionContext context; - MappedFieldType fieldType; - String fieldName; - ShapeRelation relation; - - ShapeVisitor(SearchExecutionContext context, String fieldName, ShapeRelation relation) { - this.context = context; - this.fieldType = context.getFieldType(fieldName); - this.fieldName = fieldName; - this.relation = relation; - } - - @Override - public Query visit(Circle circle) { - XYCircle xyCircle = ShapeUtils.toLuceneXYCircle(circle); - Query query = XYPointField.newDistanceQuery(fieldName, xyCircle.getX(), xyCircle.getY(), xyCircle.getRadius()); - if (fieldType.hasDocValues()) { - Query dvQuery = XYDocValuesField.newSlowDistanceQuery(fieldName, xyCircle.getX(), xyCircle.getY(), xyCircle.getRadius()); - query = new IndexOrDocValuesQuery(query, dvQuery); - } - return query; - } - - @Override - public Query visit(GeometryCollection collection) { - 
BooleanQuery.Builder bqb = new BooleanQuery.Builder(); - visit(bqb, collection); - return bqb.build(); - } - - private void visit(BooleanQuery.Builder bqb, GeometryCollection collection) { - BooleanClause.Occur occur = BooleanClause.Occur.FILTER; - for (Geometry shape : collection) { - bqb.add(shape.visit(this), occur); - } - } - - @Override - public Query visit(org.elasticsearch.geometry.Line line) { - throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + ShapeType.LINESTRING + " queries"); - } - - @Override - // don't think this is called directly - public Query visit(LinearRing ring) { - throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + ShapeType.LINEARRING + " queries"); - } - - @Override - public Query visit(MultiLine multiLine) { - throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + ShapeType.MULTILINESTRING + " queries"); - } - - @Override - public Query visit(MultiPoint multiPoint) { - throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + ShapeType.MULTIPOINT + " queries"); - } - - @Override - public Query visit(MultiPolygon multiPolygon) { - org.apache.lucene.geo.XYPolygon[] lucenePolygons = new org.apache.lucene.geo.XYPolygon[multiPolygon.size()]; - for (int i = 0; i < multiPolygon.size(); i++) { - lucenePolygons[i] = ShapeUtils.toLuceneXYPolygon(multiPolygon.get(i)); - } - Query query = XYPointField.newPolygonQuery(fieldName, lucenePolygons); - if (fieldType.hasDocValues()) { - Query dvQuery = XYDocValuesField.newSlowPolygonQuery(fieldName, lucenePolygons); - query = new IndexOrDocValuesQuery(query, dvQuery); - } - return query; - } - - @Override - public Query visit(Point point) { - // not currently supported - throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + ShapeType.POINT + " queries"); - } - - @Override - public Query visit(Polygon polygon) { - org.apache.lucene.geo.XYPolygon 
lucenePolygon = ShapeUtils.toLuceneXYPolygon(polygon); - Query query = XYPointField.newPolygonQuery(fieldName, lucenePolygon); - if (fieldType.hasDocValues()) { - Query dvQuery = XYDocValuesField.newSlowPolygonQuery(fieldName, lucenePolygon); - query = new IndexOrDocValuesQuery(query, dvQuery); - } - return query; - } - - @Override - public Query visit(Rectangle r) { - XYRectangle xyRectangle = ShapeUtils.toLuceneXYRectangle(r); - Query query = XYPointField.newBoxQuery(fieldName, xyRectangle.minX, xyRectangle.maxX, xyRectangle.minY, xyRectangle.maxY); - if (fieldType.hasDocValues()) { - Query dvQuery = XYDocValuesField.newSlowBoxQuery( - fieldName, - xyRectangle.minX, - xyRectangle.maxX, - xyRectangle.minY, - xyRectangle.maxY - ); - query = new IndexOrDocValuesQuery(query, dvQuery); - } - return query; - } + return fieldType.hasDocValues(); } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java index ac526e6016b23..4bb9e988c0f90 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java @@ -11,34 +11,20 @@ import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.geometry.GeometryCollection; -import org.elasticsearch.geometry.GeometryVisitor; -import org.elasticsearch.geometry.Line; -import org.elasticsearch.geometry.LinearRing; -import org.elasticsearch.geometry.MultiLine; -import org.elasticsearch.geometry.MultiPoint; 
-import org.elasticsearch.geometry.MultiPolygon; -import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.Polygon; -import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.xpack.spatial.common.ShapeUtils; import org.elasticsearch.xpack.spatial.index.mapper.CartesianShapeDocValuesQuery; import org.elasticsearch.xpack.spatial.index.mapper.ShapeFieldMapper; -import java.util.ArrayList; -import java.util.List; - public class ShapeQueryProcessor { public Query shapeQuery( - Geometry shape, + Geometry geometry, String fieldName, ShapeRelation relation, SearchExecutionContext context, @@ -49,10 +35,21 @@ public Query shapeQuery( if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { throw new QueryShardException(context, ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]."); } - if (shape == null) { + if (geometry == null || geometry.isEmpty()) { return new MatchNoDocsQuery(); } - return getVectorQueryFromShape(shape, fieldName, relation, context, hasDocValues); + final XYGeometry[] luceneGeometries; + try { + luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); + } catch (IllegalArgumentException e) { + throw new QueryShardException(context, "Exception creating query on Field [" + fieldName + "] " + e.getMessage(), e); + } + Query query = XYShape.newGeometryQuery(fieldName, relation.getLuceneRelation(), luceneGeometries); + if (hasDocValues) { + final Query queryDocValues = new CartesianShapeDocValuesQuery(fieldName, relation.getLuceneRelation(), luceneGeometries); + query = new IndexOrDocValuesQuery(query, queryDocValues); + } + return query; } private void validateIsShapeFieldType(String fieldName, 
SearchExecutionContext context) { @@ -64,119 +61,4 @@ private void validateIsShapeFieldType(String fieldName, SearchExecutionContext c ); } } - - private Query getVectorQueryFromShape( - Geometry queryShape, - String fieldName, - ShapeRelation relation, - SearchExecutionContext context, - boolean hasDocValues - ) { - final LuceneGeometryCollector visitor = new LuceneGeometryCollector(fieldName, context); - queryShape.visit(visitor); - final List geomList = visitor.geometries(); - if (geomList.size() == 0) { - return new MatchNoDocsQuery(); - } - XYGeometry[] geometries = geomList.toArray(new XYGeometry[0]); - Query query = XYShape.newGeometryQuery(fieldName, relation.getLuceneRelation(), geometries); - if (hasDocValues) { - final Query queryDocValues = new CartesianShapeDocValuesQuery(fieldName, relation.getLuceneRelation(), geometries); - query = new IndexOrDocValuesQuery(query, queryDocValues); - } - return query; - } - - private static class LuceneGeometryCollector implements GeometryVisitor { - private final List geometries = new ArrayList<>(); - private final String name; - private final SearchExecutionContext context; - - private LuceneGeometryCollector(String name, SearchExecutionContext context) { - this.name = name; - this.context = context; - } - - List geometries() { - return geometries; - } - - @Override - public Void visit(Circle circle) { - if (circle.isEmpty() == false) { - geometries.add(ShapeUtils.toLuceneXYCircle(circle)); - } - return null; - } - - @Override - public Void visit(GeometryCollection collection) { - for (Geometry shape : collection) { - shape.visit(this); - } - return null; - } - - @Override - public Void visit(Line line) { - if (line.isEmpty() == false) { - geometries.add(ShapeUtils.toLuceneXYLine(line)); - } - return null; - } - - @Override - public Void visit(LinearRing ring) { - throw new QueryShardException(context, "Field [" + name + "] found and unsupported shape LinearRing"); - } - - @Override - public Void visit(MultiLine 
multiLine) { - for (Line line : multiLine) { - visit(line); - } - return null; - } - - @Override - public Void visit(MultiPoint multiPoint) { - for (Point point : multiPoint) { - visit(point); - } - return null; - } - - @Override - public Void visit(MultiPolygon multiPolygon) { - for (Polygon polygon : multiPolygon) { - visit(polygon); - } - return null; - } - - @Override - public Void visit(Point point) { - if (point.isEmpty() == false) { - geometries.add(ShapeUtils.toLuceneXYPoint(point)); - } - return null; - - } - - @Override - public Void visit(Polygon polygon) { - if (polygon.isEmpty() == false) { - geometries.add(ShapeUtils.toLuceneXYPolygon(polygon)); - } - return null; - } - - @Override - public Void visit(Rectangle r) { - if (r.isEmpty() == false) { - geometries.add(ShapeUtils.toLuceneXYRectangle(r)); - } - return null; - } - } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java index ae5a6f182274b..f2148799d1b5f 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java @@ -24,12 +24,12 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.search.CheckHits; import org.apache.lucene.tests.search.QueryUtils; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; import org.elasticsearch.core.IOUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geo.XShapeTestUtil; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.spatial.common.ShapeUtils; import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import java.io.IOException; 
@@ -41,7 +41,7 @@ public class CartesianShapeDocValuesQueryTests extends ESTestCase { private static final String FIELD_NAME = "field"; public void testEqualsAndHashcode() { - XYPolygon polygon = ShapeUtils.toLuceneXYPolygon(ShapeTestUtils.randomPolygon(false)); + XYPolygon polygon = LuceneGeometriesUtils.toXYPolygon(ShapeTestUtils.randomPolygon(false)); Query q1 = new CartesianShapeDocValuesQuery(FIELD_NAME, ShapeField.QueryRelation.INTERSECTS, polygon); Query q2 = new CartesianShapeDocValuesQuery(FIELD_NAME, ShapeField.QueryRelation.INTERSECTS, polygon); QueryUtils.checkEqual(q1, q2); @@ -160,9 +160,9 @@ private XYGeometry[] randomLuceneQueryGeometries() { private XYGeometry randomLuceneQueryGeometry() { return switch (randomInt(3)) { - case 0 -> ShapeUtils.toLuceneXYPolygon(ShapeTestUtils.randomPolygon(false)); - case 1 -> ShapeUtils.toLuceneXYCircle(ShapeTestUtils.randomCircle(false)); - case 2 -> ShapeUtils.toLuceneXYPoint(ShapeTestUtils.randomPoint(false)); + case 0 -> LuceneGeometriesUtils.toXYPolygon(ShapeTestUtils.randomPolygon(false)); + case 1 -> LuceneGeometriesUtils.toXYCircle(ShapeTestUtils.randomCircle(false)); + case 2 -> LuceneGeometriesUtils.toXYPoint(ShapeTestUtils.randomPoint(false)); default -> XShapeTestUtil.nextBox(); }; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 7e20320d9d815..1d7a3cdd836ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -29,8 +29,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import 
org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; @@ -215,7 +214,7 @@ public static SearchRequest prepareRequest(SearchSourceBuilder source, SqlConfig } protected static void logSearchResponse(SearchResponse response, Logger logger) { - List aggs = Collections.emptyList(); + List aggs = Collections.emptyList(); if (response.getAggregations() != null) { aggs = response.getAggregations().asList(); } @@ -405,9 +404,9 @@ protected void handleResponse(SearchResponse response, ActionListener list logSearchResponse(response, log); } - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); if (aggs != null) { - Aggregation agg = aggs.get(Aggs.ROOT_GROUP_NAME); + InternalAggregation agg = aggs.get(Aggs.ROOT_GROUP_NAME); if (agg instanceof Filters filters) { handleBuckets(filters.getBuckets(), response); } else { diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/HistogramGroupByIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/HistogramGroupByIT.java index ccf9409d84bd8..797d592ef4571 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/HistogramGroupByIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/HistogramGroupByIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.transform.integration.continuous; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Response; import 
org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; @@ -29,7 +28,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97263") public class HistogramGroupByIT extends ContinuousTestCase { private static final String NAME = "continuous-histogram-pivot-test"; diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java index 8ea0d5e62c6d3..f4b717a108762 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.transform.integration.continuous; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -26,7 +25,6 @@ import static org.hamcrest.Matchers.equalTo; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97263") public class TermsGroupByIT extends ContinuousTestCase { private static final String NAME = "continuous-terms-pivot-test"; diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index 2f3ed29ea08fc..d4e03475af22e 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator; import org.elasticsearch.tasks.Task; @@ -180,7 +180,7 @@ protected void masterOperation( * @param aggs aggs returned by the search * @return feature usage map */ - private static Map getFeatureCounts(Aggregations aggs) { + private static Map getFeatureCounts(InternalAggregations aggs) { Filters filters = aggs.get(FEATURE_COUNTS); return filters.getBuckets().stream().collect(toMap(Filters.Bucket::getKeyAsString, Filters.Bucket::getDocCount)); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java index 189fb26e1f969..3412be813dcf6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java @@ -20,7 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -81,7 +81,7 @@ public void preview( buildSearchRequest(sourceConfig, timeout, numberOfBuckets), ActionListener.wrap(r -> { try { - final Aggregations aggregations = r.getAggregations(); + final InternalAggregations aggregations = r.getAggregations(); if (aggregations == null) { listener.onFailure( new ElasticsearchStatusException("Source indices have been deleted or closed.", RestStatus.BAD_REQUEST) @@ -158,7 +158,7 @@ public Tuple, Map> processSearchResponse( TransformIndexerStats stats, TransformProgress progress ) { - Aggregations aggregations = searchResponse.getAggregations(); + InternalAggregations aggregations = searchResponse.getAggregations(); // Treat this as a "we reached the end". // This should only happen when all underlying indices have gone away. Consequently, there is no more data to read. 
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index 0636555459632..684e3a085405d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -22,7 +22,7 @@ import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; @@ -92,7 +92,7 @@ interface FieldCollector { * * @return true if this collection is done and there are no more changes to look for */ - boolean collectChangesFromAggregations(Aggregations aggregations); + boolean collectChangesFromAggregations(InternalAggregations aggregations); /** * Return a composite value source builder if the collector requires it. 
@@ -248,7 +248,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { return true; } @@ -314,7 +314,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { return true; } @@ -401,7 +401,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { final SingleValue lowerBoundResult = aggregations.get(minAggregationOutputName); final SingleValue upperBoundResult = aggregations.get(maxAggregationOutputName); @@ -510,7 +510,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { final SingleValue lowerBoundResult = aggregations.get(minAggregationOutputName); final SingleValue upperBoundResult = aggregations.get(maxAggregationOutputName); @@ -659,7 +659,7 @@ public Collection aggregateChanges() { } @Override - public boolean collectChangesFromAggregations(Aggregations aggregations) { + public boolean collectChangesFromAggregations(InternalAggregations aggregations) { return true; } @@ -743,7 +743,7 @@ public Collection getIndicesToQuery(TransformCheckpoint lastCheckpoint, @Override public Map processSearchResponse(final SearchResponse searchResponse) { - final Aggregations aggregations = searchResponse.getAggregations(); + final InternalAggregations aggregations = searchResponse.getAggregations(); if (aggregations == null) { return null; } diff --git 
a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java index fe6a0b93ca7cd..ba5b97bbcb062 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java @@ -34,7 +34,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoGrid; @@ -136,9 +136,9 @@ public RestResponse buildResponse(SearchResponse searchResponse) throws Exceptio final InternalGeoBounds bounds = searchResponse.getAggregations() != null ? searchResponse.getAggregations().get(BOUNDS_FIELD) : null; - final Aggregations aggsWithoutGridAndBounds = searchResponse.getAggregations() == null + final InternalAggregations aggsWithoutGridAndBounds = searchResponse.getAggregations() == null ? 
null - : new Aggregations( + : InternalAggregations.from( searchResponse.getAggregations() .asList() .stream() diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index edee4fb515a81..5b7ea39079f28 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; @@ -106,7 +106,7 @@ public void testEmailFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("from"); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 01400c3192289..97347de1ea23e 100644 --- 
a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; @@ -103,7 +103,7 @@ public void testHttpFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("input_result_path"); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 1f2810c4d82f3..7dde279fb90db 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher.history; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; @@ -55,7 +55,7 @@ public void testIndexActionFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("index_action_indices"); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 2c86df184dc22..567d4acfa45e5 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; @@ -73,7 +73,7 @@ public void testHttpFields() throws Exception { response -> { assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - Aggregations aggs = response.getAggregations(); + InternalAggregations aggs = 
response.getAggregations(); assertThat(aggs, notNullValue()); Terms terms = aggs.get("input_search_type"); From 2846aa707fc0d954bfc31ee65c1d4b5a273613d6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 1 Feb 2024 07:10:16 +0000 Subject: [PATCH 065/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-3d8ad990397 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9b8d8497b0219..7942f8de859de 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-1e36b461474 +lucene = 9.10.0-snapshot-3d8ad990397 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index bbbf62bb7b252..a45ee4632d234 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 9a7de59c519d9fa728c0bc1a1c7bf92e551a4778 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 2 Feb 2024 07:09:49 +0000 Subject: [PATCH 066/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-4e73a4b2aca --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7942f8de859de..0c652ca3d2d18 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-3d8ad990397 +lucene = 9.10.0-snapshot-4e73a4b2aca bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a45ee4632d234..5f839142ddf05 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 38709e2330e38f679184d998c30739d2d4ae4421 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 3 Feb 2024 07:09:31 +0000 Subject: [PATCH 067/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-70bab56f6fe --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 0c652ca3d2d18..70f9bae0dfa63 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-4e73a4b2aca +lucene = 9.10.0-snapshot-70bab56f6fe bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5f839142ddf05..ce0542b3e6673 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6deb746e1ff549627a710c955329d6bf7508b5f3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 4 Feb 2024 07:09:41 +0000 Subject: [PATCH 068/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-70bab56f6fe --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index ce0542b3e6673..6fd4c78907f28 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2635,122 +2635,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From f1213721dd6b7c051c7c8345f7564c7181e1bbf3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 5 Feb 2024 07:09:56 +0000 Subject: [PATCH 069/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-3da32a257be --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 70f9bae0dfa63..217330652047b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-70bab56f6fe +lucene = 9.10.0-snapshot-3da32a257be bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 6fd4c78907f28..fcb4fbcff17d0 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From c572ca1c9528134a0c6648aca98de7253e8b1002 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 6 Feb 2024 07:10:14 +0000 Subject: [PATCH 070/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-c4df3e13ad8 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 217330652047b..123e9ad257a6f 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-3da32a257be +lucene = 9.10.0-snapshot-c4df3e13ad8 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fcb4fbcff17d0..85914977867d4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e562d3b317d04c377f0638d3b983542dced49a5f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 7 Feb 2024 07:09:42 +0000 Subject: [PATCH 071/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-c4df3e13ad8 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 85914977867d4..b1289882ddeed 100644 --- 
a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2635,122 +2635,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From e6f2e70f130abcd62c0e692217eb40feed2caf9c Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 7 Feb 2024 14:41:29 +0100 Subject: [PATCH 072/107] Fix compile failure For some reason #105014 is included only partially, probably a bad merge. --- .../org/elasticsearch/index/rankeval/RatedRequestsTests.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java index d7e50fe4e1a8b..c5a09d67d94d0 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java @@ -109,8 +109,6 @@ public static RatedRequest createTestItem(boolean forceRequest) { } public void testXContentRoundtrip() throws IOException { - assumeFalse("https://github.com/elastic/elasticsearch/issues/104570", Constants.WINDOWS); - RatedRequest testItem = createTestItem(randomBoolean()); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); @@ -301,8 +299,6 @@ public void testProfileNotAllowed() { * matter for parsing xContent */ public void testParseFromXContent() throws IOException { - assumeFalse("https://github.com/elastic/elasticsearch/issues/104570", Constants.WINDOWS); - String querySpecString = """ { "id": "my_qa_query", From 4661023fa5265f7e0eb8924f0d86ef77bc256269 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 8 Feb 2024 07:09:50 +0000 Subject: [PATCH 073/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-f3e2929a52c --- 
build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 123e9ad257a6f..224859226e0da 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-c4df3e13ad8 +lucene = 9.10.0-snapshot-f3e2929a52c bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b1289882ddeed..4a43364f53544 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 739072c00bc1e0fcb009de14aa7fa87cb124d1bf Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Thu, 8 Feb 2024 15:31:22 -0500 Subject: [PATCH 074/107] Fix compilation for ESKnnQuery objects (#105302) --- .../search/vectors/ESKnnByteVectorQuery.java | 18 ------------------ .../search/vectors/ESKnnFloatVectorQuery.java | 18 ------------------ 2 files changed, 36 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 091ce6f8a0f6d..05cf52fd23f24 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -8,35 +8,17 @@ package org.elasticsearch.search.vectors; -import org.apache.lucene.index.LeafReaderContext; import 
org.apache.lucene.search.KnnByteVectorQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.util.Bits; import org.elasticsearch.search.profile.query.QueryProfiler; -import java.io.IOException; - public class ESKnnByteVectorQuery extends KnnByteVectorQuery implements ProfilingQuery { - private static final TopDocs NO_RESULTS = TopDocsCollector.EMPTY_TOPDOCS; private long vectorOpsCount; - private final byte[] target; public ESKnnByteVectorQuery(String field, byte[] target, int k, Query filter) { super(field, target, k, filter); - this.target = target; - } - - @Override - protected TopDocs approximateSearch(LeafReaderContext context, Bits acceptDocs, int visitedLimit) throws IOException { - // We increment visit limit by one to bypass a fencepost error in the collector - if (visitedLimit < Integer.MAX_VALUE) { - visitedLimit += 1; - } - TopDocs results = context.reader().searchNearestVectors(field, target, k, acceptDocs, visitedLimit); - return results != null ? 
results : NO_RESULTS; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index 4fa4db1f4ea95..e83a90a3c4df8 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -8,24 +8,16 @@ package org.elasticsearch.search.vectors; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.util.Bits; import org.elasticsearch.search.profile.query.QueryProfiler; -import java.io.IOException; - public class ESKnnFloatVectorQuery extends KnnFloatVectorQuery implements ProfilingQuery { - private static final TopDocs NO_RESULTS = TopDocsCollector.EMPTY_TOPDOCS; private long vectorOpsCount; - private final float[] target; public ESKnnFloatVectorQuery(String field, float[] target, int k, Query filter) { super(field, target, k, filter); - this.target = target; } @Override @@ -35,16 +27,6 @@ protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { return topK; } - @Override - protected TopDocs approximateSearch(LeafReaderContext context, Bits acceptDocs, int visitedLimit) throws IOException { - // We increment visit limit by one to bypass a fencepost error in the collector - if (visitedLimit < Integer.MAX_VALUE) { - visitedLimit += 1; - } - TopDocs results = context.reader().searchNearestVectors(field, target, k, acceptDocs, visitedLimit); - return results != null ? 
results : NO_RESULTS; - } - @Override public void profile(QueryProfiler queryProfiler) { queryProfiler.setVectorOpsCount(vectorOpsCount); From 844e75fd3f1c9c67d47564ba1c88fbf4aa4bfa21 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 9 Feb 2024 07:09:53 +0000 Subject: [PATCH 075/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-06ee710c3c4 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 224859226e0da..687b1a3135bd9 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-f3e2929a52c +lucene = 9.10.0-snapshot-06ee710c3c4 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4a43364f53544..92190d4ad7aa6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 1be3e9a1b7cc7e3cdd9c238d3b1c5b7b91870fc7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 11 Feb 2024 07:09:10 +0000 Subject: [PATCH 076/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-f4dbab4e10e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 687b1a3135bd9..4a630e47dc5dd 
100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-06ee710c3c4 +lucene = 9.10.0-snapshot-f4dbab4e10e bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 92190d4ad7aa6..389e5c933dedf 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 42b658570893abd3ec8f602bd7ab73c524710f94 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 12 Feb 2024 07:09:56 +0000 Subject: [PATCH 077/107] [Automated] Update Lucene snapshot to 9.10.0-snapshot-695c0ac8450 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4a630e47dc5dd..645ae67927f6b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.10.0-snapshot-f4dbab4e10e +lucene = 9.10.0-snapshot-695c0ac8450 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 389e5c933dedf..45c12ab8983e3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2633,124 +2633,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a55ce085e7b9f1b1e3eaf7df3e0f9559a2d76351 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 15 Feb 2024 22:23:45 +0100 Subject: [PATCH 078/107] Move to final 9.10.0 artifacts. --- build-tools-internal/version.properties | 2 +- build.gradle | 5 + gradle/verification-metadata.xml | 144 ++++++++++++------------ 3 files changed, 78 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 46352fb23e164..6750ac59204ad 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.14.0 -lucene = 9.10.0-snapshot-695c0ac8450 +lucene = 9.10.0 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac diff --git a/build.gradle b/build.gradle index c0b613beefea4..e4e8d62766dcc 100644 --- a/build.gradle +++ b/build.gradle @@ -195,6 +195,11 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' + + repositories { + // TODO: Temporary for Lucene RC builds. 
REMOVE + maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.10.0-RC1-rev-695c0ac84508438302cd346a812cfa2fdc5a10df/lucene/maven" } + } } allprojects { diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 24a65542caf35..56964aca95357 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2648,124 +2648,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a04712b383928cde359bd1ef95256bbcb27f98d9 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 15 Feb 2024 22:27:09 +0100 Subject: [PATCH 079/107] Update docs/changelog/105578.yaml --- docs/changelog/105578.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 docs/changelog/105578.yaml diff --git a/docs/changelog/105578.yaml b/docs/changelog/105578.yaml new file mode 100644 index 0000000000000..cbc58e9636a4d --- /dev/null +++ b/docs/changelog/105578.yaml @@ -0,0 +1,5 @@ +pr: 105578 +summary: Upgrade to Lucene 9.10.0 +area: Search +type: enhancement +issues: [] From 950d46a9d104b90646e590eca23441a1b90300e5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 15 Feb 2024 22:33:09 +0100 Subject: [PATCH 080/107] Fix compilation. 
--- .../java/org/elasticsearch/index/mapper/FieldTypeTestCase.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index d1a07cd0ee089..d4c6f8f3df873 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -146,7 +146,8 @@ public FieldInfo getFieldInfoWithName(String name) { 1, VectorEncoding.BYTE, VectorSimilarityFunction.COSINE, - randomBoolean() + randomBoolean(), + false ); } } From 98ceb06d9337abb69dd6a60ef949a33de7ab5607 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 20 Feb 2024 11:41:02 +0100 Subject: [PATCH 081/107] Remove temporary repository, go to Maven central instead. --- build.gradle | 5 ----- 1 file changed, 5 deletions(-) diff --git a/build.gradle b/build.gradle index e4e8d62766dcc..c0b613beefea4 100644 --- a/build.gradle +++ b/build.gradle @@ -195,11 +195,6 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' - - repositories { - // TODO: Temporary for Lucene RC builds. 
REMOVE - maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.10.0-RC1-rev-695c0ac84508438302cd346a812cfa2fdc5a10df/lucene/maven" } - } } allprojects { From 40af2756f921afa902994c6dfcd56948c66bcfb2 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:35:33 -0500 Subject: [PATCH 082/107] updating release notes --- docs/changelog/105578.yaml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/changelog/105578.yaml b/docs/changelog/105578.yaml index cbc58e9636a4d..5b7ebd250be0e 100644 --- a/docs/changelog/105578.yaml +++ b/docs/changelog/105578.yaml @@ -1,5 +1,13 @@ pr: 105578 summary: Upgrade to Lucene 9.10.0 area: Search -type: enhancement -issues: [] +type: feature +issues: + - 104556 +highlight: + title: "New Lucene 9.10 release" + body: |- + - https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. + - https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNWS graph search + - https://github.com/apache/lucene/pull/13033: PointRangeQuery now exits earlier on segments whose values don't intersect with the query range. When a PointRangeQuery is a required clause of a boolean query, this helps save work on other required clauses of the same boolean query. + - https://github.com/apache/lucene/pull/13026: Propagate minimum competitive score in ReqOptSumScorer. 
From c9989708910eca52a99628024b8bd4291dc97393 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 22 Feb 2024 14:21:59 +0100 Subject: [PATCH 083/107] HNWS -> HNSW --- docs/changelog/105578.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog/105578.yaml b/docs/changelog/105578.yaml index 5b7ebd250be0e..d7420096cb178 100644 --- a/docs/changelog/105578.yaml +++ b/docs/changelog/105578.yaml @@ -8,6 +8,6 @@ highlight: title: "New Lucene 9.10 release" body: |- - https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. - - https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNWS graph search - - https://github.com/apache/lucene/pull/13033: PointRangeQuery now exits earlier on segments whose values don't intersect with the query range. When a PointRangeQuery is a required clause of a boolean query, this helps save work on other required clauses of the same boolean query. - - https://github.com/apache/lucene/pull/13026: Propagate minimum competitive score in ReqOptSumScorer. + - https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search + - https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. + - https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. 
From 987b778bb676e2f9790c2be665792a4e201bbb58 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Mar 2024 10:46:43 +0100 Subject: [PATCH 084/107] Extract cold(ish) paths in some SearchPhases (#105884) The way these phases are currently executed often means that large parts of the phase run code never runs. Lets move it to separate methods to help the compiler and more importantly, make profiling easier to interpret. --- .../action/search/ExpandSearchPhase.java | 102 +++++++++--------- .../action/search/FetchLookupFieldsPhase.java | 4 + ...SearchScrollQueryThenFetchAsyncAction.java | 3 + 3 files changed, 59 insertions(+), 50 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 149cdb9206b34..48c2f1890ba08 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -44,66 +44,68 @@ final class ExpandSearchPhase extends SearchPhase { * Returns true iff the search request has inner hits and needs field collapsing */ private boolean isCollapseRequest() { - final SearchRequest searchRequest = context.getRequest(); - return searchRequest.source() != null - && searchRequest.source().collapse() != null - && searchRequest.source().collapse().getInnerHits().isEmpty() == false; + final var searchSource = context.getRequest().source(); + return searchSource != null && searchSource.collapse() != null && searchSource.collapse().getInnerHits().isEmpty() == false; } @Override public void run() { - if (isCollapseRequest() && searchHits.getHits().length > 0) { - SearchRequest searchRequest = context.getRequest(); - CollapseBuilder collapseBuilder = searchRequest.source().collapse(); - final List innerHitBuilders = collapseBuilder.getInnerHits(); - MultiSearchRequest multiRequest = new MultiSearchRequest(); - if 
(collapseBuilder.getMaxConcurrentGroupRequests() > 0) { - multiRequest.maxConcurrentSearchRequests(collapseBuilder.getMaxConcurrentGroupRequests()); + if (isCollapseRequest() == false || searchHits.getHits().length == 0) { + onPhaseDone(); + } else { + doRun(); + } + } + + private void doRun() { + SearchRequest searchRequest = context.getRequest(); + CollapseBuilder collapseBuilder = searchRequest.source().collapse(); + final List innerHitBuilders = collapseBuilder.getInnerHits(); + MultiSearchRequest multiRequest = new MultiSearchRequest(); + if (collapseBuilder.getMaxConcurrentGroupRequests() > 0) { + multiRequest.maxConcurrentSearchRequests(collapseBuilder.getMaxConcurrentGroupRequests()); + } + for (SearchHit hit : searchHits.getHits()) { + BoolQueryBuilder groupQuery = new BoolQueryBuilder(); + Object collapseValue = hit.field(collapseBuilder.getField()).getValue(); + if (collapseValue != null) { + groupQuery.filter(QueryBuilders.matchQuery(collapseBuilder.getField(), collapseValue)); + } else { + groupQuery.mustNot(QueryBuilders.existsQuery(collapseBuilder.getField())); + } + QueryBuilder origQuery = searchRequest.source().query(); + if (origQuery != null) { + groupQuery.must(origQuery); + } + for (InnerHitBuilder innerHitBuilder : innerHitBuilders) { + CollapseBuilder innerCollapseBuilder = innerHitBuilder.getInnerCollapseBuilder(); + SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder, innerCollapseBuilder).query(groupQuery) + .postFilter(searchRequest.source().postFilter()) + .runtimeMappings(searchRequest.source().runtimeMappings()); + SearchRequest groupRequest = new SearchRequest(searchRequest); + groupRequest.source(sourceBuilder); + multiRequest.add(groupRequest); } + } + context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(), ActionListener.wrap(response -> { + Iterator it = response.iterator(); for (SearchHit hit : searchHits.getHits()) { - BoolQueryBuilder groupQuery = new 
BoolQueryBuilder(); - Object collapseValue = hit.field(collapseBuilder.getField()).getValue(); - if (collapseValue != null) { - groupQuery.filter(QueryBuilders.matchQuery(collapseBuilder.getField(), collapseValue)); - } else { - groupQuery.mustNot(QueryBuilders.existsQuery(collapseBuilder.getField())); - } - QueryBuilder origQuery = searchRequest.source().query(); - if (origQuery != null) { - groupQuery.must(origQuery); - } for (InnerHitBuilder innerHitBuilder : innerHitBuilders) { - CollapseBuilder innerCollapseBuilder = innerHitBuilder.getInnerCollapseBuilder(); - SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder, innerCollapseBuilder).query( - groupQuery - ).postFilter(searchRequest.source().postFilter()).runtimeMappings(searchRequest.source().runtimeMappings()); - SearchRequest groupRequest = new SearchRequest(searchRequest); - groupRequest.source(sourceBuilder); - multiRequest.add(groupRequest); - } - } - context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(), ActionListener.wrap(response -> { - Iterator it = response.iterator(); - for (SearchHit hit : searchHits.getHits()) { - for (InnerHitBuilder innerHitBuilder : innerHitBuilders) { - MultiSearchResponse.Item item = it.next(); - if (item.isFailure()) { - context.onPhaseFailure(this, "failed to expand hits", item.getFailure()); - return; - } - SearchHits innerHits = item.getResponse().getHits(); - if (hit.getInnerHits() == null) { - hit.setInnerHits(Maps.newMapWithExpectedSize(innerHitBuilders.size())); - } - hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); - innerHits.mustIncRef(); + MultiSearchResponse.Item item = it.next(); + if (item.isFailure()) { + context.onPhaseFailure(this, "failed to expand hits", item.getFailure()); + return; } + SearchHits innerHits = item.getResponse().getHits(); + if (hit.getInnerHits() == null) { + hit.setInnerHits(Maps.newMapWithExpectedSize(innerHitBuilders.size())); + } + 
hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); + innerHits.mustIncRef(); } - onPhaseDone(); - }, context::onFailure)); - } else { + } onPhaseDone(); - } + }, context::onFailure)); } private static SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder options, CollapseBuilder innerCollapseBuilder) { diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java index 9c50d534ac4ce..0605e23fc343c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java @@ -75,6 +75,10 @@ public void run() { context.sendSearchResponse(searchResponse, queryResults); return; } + doRun(clusters); + } + + private void doRun(List clusters) { final MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); for (Cluster cluster : clusters) { // Do not prepend the clusterAlias to the targetIndex if the search request is already on the remote cluster. 
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java index bad0ed488d03b..793a5bfe4e9d4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java @@ -73,7 +73,10 @@ public void run() { sendResponse(reducedQueryPhase, fetchResults); return; } + doRun(scoreDocs, reducedQueryPhase); + } + private void doRun(ScoreDoc[] scoreDocs, SearchPhaseController.ReducedQueryPhase reducedQueryPhase) { final List[] docIdsToLoad = SearchPhaseController.fillDocIdsToLoad(queryResults.length(), scoreDocs); final ScoreDoc[] lastEmittedDocPerShard = SearchPhaseController.getLastEmittedDocPerShard( reducedQueryPhase, From b37e6e9d55862e0095166a7756945690901d8b56 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 4 Mar 2024 11:05:32 +0100 Subject: [PATCH 085/107] [Connectors API] Guard cancel and update error sync job endpoints with state machine (#105722) --- .../430_connector_sync_job_cancel.yml | 27 +++- .../450_connector_sync_job_error.yml | 13 +- .../syncjob/ConnectorSyncJobIndexService.java | 148 +++++++++++------ ...ncJobInvalidStatusTransitionException.java | 31 ++++ .../syncjob/ConnectorSyncJobStateMachine.java | 13 ++ .../ConnectorSyncJobIndexServiceTests.java | 152 +++++++++++++++++- .../ConnectorSyncJobStateMachineTests.java | 27 ++++ 7 files changed, 350 insertions(+), 61 deletions(-) create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobInvalidStatusTransitionException.java diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml index 633c1a8cecb7b..eea4ca197614d 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml @@ -13,7 +13,7 @@ setup: service_type: super-connector --- -"Cancel a Connector Sync Job": +"Cancel a pending Connector Sync Job - transition to canceled directly": - do: connector_sync_job.post: body: @@ -33,8 +33,31 @@ setup: connector_sync_job.get: connector_sync_job_id: $sync-job-id-to-cancel - - match: { status: "canceling"} + - set: { cancelation_requested_at: cancelation_requested_at } + - match: { status: "canceled"} + - match: { completed_at: $cancelation_requested_at } + - match: { canceled_at: $cancelation_requested_at } + +--- +"Cancel a canceled Connector Sync Job - invalid state transition from canceled to canceling": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + + - set: { id: sync-job-id-to-cancel } + + - do: + connector_sync_job.cancel: + connector_sync_job_id: $sync-job-id-to-cancel + + - do: + catch: bad_request + connector_sync_job.cancel: + connector_sync_job_id: $sync-job-id-to-cancel --- "Cancel a Connector Sync Job - Connector Sync Job does not exist": diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml index a565d28c3e788..78cfdb845b10e 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml @@ -13,7 +13,7 @@ setup: service_type: super-connector --- -"Set an error for a connector sync job": +"Set an error for a pending connector sync job - invalid state transition from pending to error": - do: connector_sync_job.post: body: @@ -24,21 +24,12 @@ setup: - set: { id: id } - do: + catch: bad_request connector_sync_job.error: connector_sync_job_id: $id body: error: error - - match: { result: updated } - - - do: - connector_sync_job.get: - connector_sync_job_id: $id - - - match: { error: error } - - match: { status: error } - - --- "Set an error for a Connector Sync Job - Connector Sync Job does not exist": - do: diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 910f0605ef7aa..3ac598fd58ee8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -31,6 +32,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; @@ -219,38 +221,72 
@@ public void getConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { - Instant cancellationRequestedAt = Instant.now(); + try { + getConnectorSyncJob(connectorSyncJobId, listener.delegateFailure((getSyncJobListener, syncJobSearchResult) -> { + Map syncJobFieldsToUpdate; + Instant now = Instant.now(); - final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( - WriteRequest.RefreshPolicy.IMMEDIATE - ) - .doc( - Map.of( - ConnectorSyncJob.STATUS_FIELD.getPreferredName(), - ConnectorSyncStatus.CANCELING, - ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), - cancellationRequestedAt - ) - ); + ConnectorSyncStatus prevStatus = getConnectorSyncJobStatusFromSearchResult(syncJobSearchResult); - try { - client.update( - updateRequest, - new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, updateResponse) -> { - if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); - return; + try { + if (ConnectorSyncStatus.PENDING.equals(prevStatus) || ConnectorSyncStatus.SUSPENDED.equals(prevStatus)) { + // A pending or suspended non-running sync job is set to `canceled` directly + // without a transition to the in-between `canceling` status + ConnectorSyncStatus nextStatus = ConnectorSyncStatus.CANCELED; + ConnectorSyncJobStateMachine.assertValidStateTransition(prevStatus, nextStatus); + + syncJobFieldsToUpdate = Map.of( + ConnectorSyncJob.STATUS_FIELD.getPreferredName(), + nextStatus, + ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), + now, + ConnectorSyncJob.CANCELED_AT_FIELD.getPreferredName(), + now, + ConnectorSyncJob.COMPLETED_AT_FIELD.getPreferredName(), + now + ); + } else { + ConnectorSyncStatus nextStatus = ConnectorSyncStatus.CANCELING; + ConnectorSyncJobStateMachine.assertValidStateTransition(prevStatus, nextStatus); + + 
syncJobFieldsToUpdate = Map.of( + ConnectorSyncJob.STATUS_FIELD.getPreferredName(), + nextStatus, + ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), + now + ); } - l.onResponse(updateResponse); - }) - ); + } catch (ConnectorSyncJobInvalidStatusTransitionException e) { + getSyncJobListener.onFailure(new ElasticsearchStatusException(e.getMessage(), RestStatus.BAD_REQUEST, e)); + return; + } + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ).doc(syncJobFieldsToUpdate); + + client.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>( + connectorSyncJobId, + listener, + (indexNotFoundListener, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + indexNotFoundListener.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + indexNotFoundListener.onResponse(updateResponse); + } + ) + ); + })); } catch (Exception e) { listener.onFailure(e); } @@ -415,6 +451,12 @@ public void updateConnectorSyncJobIngestionStats( } + private ConnectorSyncStatus getConnectorSyncJobStatusFromSearchResult(ConnectorSyncJobSearchResult searchResult) { + return ConnectorSyncStatus.connectorSyncStatus( + (String) searchResult.getResultMap().get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + } + private void getSyncJobConnectorInfo(String connectorId, ActionListener listener) { try { @@ -485,29 +527,45 @@ FilteringRules transformConnectorFilteringToSyncJobRepresentation(List listener) { - final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( - WriteRequest.RefreshPolicy.IMMEDIATE - ) - .doc( - Map.of( - ConnectorSyncJob.ERROR_FIELD.getPreferredName(), - error, - ConnectorSyncJob.STATUS_FIELD.getPreferredName(), - ConnectorSyncStatus.ERROR + try { + getConnectorSyncJob(connectorSyncJobId, 
listener.delegateFailure((getSyncJobListener, syncJobSearchResult) -> { + ConnectorSyncStatus prevStatus = getConnectorSyncJobStatusFromSearchResult(syncJobSearchResult); + ConnectorSyncStatus nextStatus = ConnectorSyncStatus.ERROR; + + try { + ConnectorSyncJobStateMachine.assertValidStateTransition(prevStatus, nextStatus); + } catch (ConnectorSyncJobInvalidStatusTransitionException e) { + getSyncJobListener.onFailure(new ElasticsearchStatusException(e.getMessage(), RestStatus.BAD_REQUEST, e)); + return; + } + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE ) - ); + .doc( + Map.of( + ConnectorSyncJob.ERROR_FIELD.getPreferredName(), + error, + ConnectorSyncJob.STATUS_FIELD.getPreferredName(), + nextStatus + ) + ); - try { - client.update( - updateRequest, - new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, updateResponse) -> { - if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); - return; - } - l.onResponse(updateResponse); - }) - ); + client.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>( + connectorSyncJobId, + listener, + (indexNotFoundListener, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + indexNotFoundListener.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + indexNotFoundListener.onResponse(updateResponse); + } + ) + ); + })); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobInvalidStatusTransitionException.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobInvalidStatusTransitionException.java new file mode 100644 
index 0000000000000..3ded62afa5d14 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobInvalidStatusTransitionException.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; + +public class ConnectorSyncJobInvalidStatusTransitionException extends Exception { + + /** + * Constructs a {@link ConnectorSyncJobInvalidStatusTransitionException} exception with a detailed message. + * + * @param current The current {@link ConnectorSyncStatus} of the {@link ConnectorSyncJob}. + * @param next The attempted next {@link ConnectorSyncStatus} of the {@link ConnectorSyncJob}. + */ + public ConnectorSyncJobInvalidStatusTransitionException(ConnectorSyncStatus current, ConnectorSyncStatus next) { + super( + "Invalid transition attempt from [" + + current + + "] to [" + + next + + "]. Such a " + + ConnectorSyncStatus.class.getSimpleName() + + " transition is not supported by the Connector Protocol." 
+ ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachine.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachine.java index 7a7a05bd5e455..dc624b5bf8ba1 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachine.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachine.java @@ -47,4 +47,17 @@ public class ConnectorSyncJobStateMachine { public static boolean isValidTransition(ConnectorSyncStatus current, ConnectorSyncStatus next) { return VALID_TRANSITIONS.getOrDefault(current, Collections.emptySet()).contains(next); } + + /** + * Throws {@link ConnectorSyncJobInvalidStatusTransitionException} if a + * transition from one {@link ConnectorSyncStatus} to another is invalid. + * + * @param current The current {@link ConnectorSyncStatus} of the {@link ConnectorSyncJob}. + * @param next The proposed next {@link ConnectorSyncStatus} of the {@link ConnectorSyncJob}. 
+ */ + public static void assertValidStateTransition(ConnectorSyncStatus current, ConnectorSyncStatus next) + throws ConnectorSyncJobInvalidStatusTransitionException { + if (isValidTransition(current, next)) return; + throw new ConnectorSyncJobInvalidStatusTransitionException(current, next); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 4a7a3e76ecf42..d486a27f1b728 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; @@ -17,6 +18,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -80,7 +82,6 @@ public void setup() throws Exception { } private String createConnector() throws IOException, InterruptedException, ExecutionException, TimeoutException { - Connector connector = ConnectorTestUtils.getRandomConnector(); final IndexRequest indexRequest = new IndexRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) @@ -229,7 +230,8 @@ public void 
testCheckInConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCheckInConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } - public void testCancelConnectorSyncJob() throws Exception { + public void testCancelConnectorSyncJob_WithStatusInProgress_ExpectNextStatusCanceling() throws Exception { + // Create connector sync job PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connectorOneId ); @@ -247,6 +249,10 @@ public void testCancelConnectorSyncJob() throws Exception { assertThat(cancellationRequestedAtBeforeUpdate, nullValue()); assertThat(syncStatusBeforeUpdate, not(equalTo(ConnectorSyncStatus.CANCELING))); + // Set sync job status to `in_progress` + updateConnectorSyncJobStatusWithoutStateMachineGuard(syncJobId, ConnectorSyncStatus.IN_PROGRESS); + + // Cancel sync job UpdateResponse updateResponse = awaitCancelConnectorSyncJob(syncJobId); Map syncJobSourceAfterUpdate = getConnectorSyncJobSourceById(syncJobId); @@ -263,6 +269,103 @@ public void testCancelConnectorSyncJob() throws Exception { assertFieldsExceptSyncStatusAndCancellationRequestedAtDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); } + public void testCancelConnectorSyncJob_WithPendingState_ExpectNextStatusCanceled() throws Exception { + // Create pending sync job + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOneId + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + ConnectorSyncStatus syncStatusBeforeUpdate = ConnectorSyncStatus.fromString( + (String) syncJobSourceBeforeUpdate.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + Object canceledAtBeforeUpdate = 
syncJobSourceBeforeUpdate.get(ConnectorSyncJob.CANCELED_AT_FIELD.getPreferredName()); + + assertThat(syncJobId, notNullValue()); + assertThat(canceledAtBeforeUpdate, nullValue()); + assertThat(syncStatusBeforeUpdate, not(equalTo(ConnectorSyncStatus.CANCELED))); + + // Cancel sync job + UpdateResponse updateResponse = awaitCancelConnectorSyncJob(syncJobId); + + Map syncJobSourceAfterUpdate = getConnectorSyncJobSourceById(syncJobId); + ConnectorSyncStatus syncStatusAfterUpdate = ConnectorSyncStatus.fromString( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + Instant canceledAtAfterUpdate = Instant.parse( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.CANCELED_AT_FIELD.getPreferredName()) + ); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertThat(canceledAtAfterUpdate, notNullValue()); + assertThat(syncStatusAfterUpdate, equalTo(ConnectorSyncStatus.CANCELED)); + assertFieldsExceptSyncStatusAndCanceledAndCompletedTimestampsDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testCancelConnectorSyncJob_WithSuspendedState_ExpectNextStatusCanceled() throws Exception { + // Create pending sync job + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOneId + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + ConnectorSyncStatus syncStatusBeforeUpdate = ConnectorSyncStatus.fromString( + (String) syncJobSourceBeforeUpdate.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + Object canceledAtBeforeUpdate = syncJobSourceBeforeUpdate.get(ConnectorSyncJob.CANCELED_AT_FIELD.getPreferredName()); + + assertThat(syncJobId, notNullValue()); + assertThat(canceledAtBeforeUpdate, nullValue()); + 
assertThat(syncStatusBeforeUpdate, not(equalTo(ConnectorSyncStatus.CANCELED))); + + // Set sync job to suspended + updateConnectorSyncJobStatusWithoutStateMachineGuard(syncJobId, ConnectorSyncStatus.SUSPENDED); + + // Cancel sync job + UpdateResponse updateResponse = awaitCancelConnectorSyncJob(syncJobId); + + Map syncJobSourceAfterUpdate = getConnectorSyncJobSourceById(syncJobId); + ConnectorSyncStatus syncStatusAfterUpdate = ConnectorSyncStatus.fromString( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + Instant canceledAtAfterUpdate = Instant.parse( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.CANCELED_AT_FIELD.getPreferredName()) + ); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertThat(canceledAtAfterUpdate, notNullValue()); + assertThat(syncStatusAfterUpdate, equalTo(ConnectorSyncStatus.CANCELED)); + assertFieldsExceptSyncStatusAndCanceledAndCompletedTimestampsDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testCancelConnectorSyncJob_WithCompletedState_ExpectStatusException() throws Exception { + // Create sync job + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOneId + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + ConnectorSyncStatus syncStatusBeforeUpdate = ConnectorSyncStatus.fromString( + (String) syncJobSourceBeforeUpdate.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + Object cancellationRequestedAtBeforeUpdate = syncJobSourceBeforeUpdate.get( + ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD.getPreferredName() + ); + + assertThat(syncJobId, notNullValue()); + assertThat(cancellationRequestedAtBeforeUpdate, nullValue()); + assertThat(syncStatusBeforeUpdate, 
not(equalTo(ConnectorSyncStatus.CANCELING))); + + // Set sync job status to `completed` + updateConnectorSyncJobStatusWithoutStateMachineGuard(syncJobId, ConnectorSyncStatus.COMPLETED); + + // Cancel sync job + assertThrows(ElasticsearchStatusException.class, () -> awaitCancelConnectorSyncJob(syncJobId)); + } + public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } @@ -332,7 +435,7 @@ public void testListConnectorSyncJobs() throws Exception { assertTrue(secondSyncJob.getCreatedAt().isAfter(firstSyncJob.getCreatedAt())); } - public void testListConnectorSyncJobs_WithStatusPending_GivenOnePendingTwoCancelled_ExpectOnePending() throws Exception { + public void testListConnectorSyncJobs_WithStatusPending_GivenOnePendingTwoCanceled_ExpectOnePending() throws Exception { PostConnectorSyncJobAction.Request requestOne = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connectorOneId ); @@ -547,12 +650,17 @@ public void testListConnectorSyncJobs_WithNoSyncJobs_ReturnEmptyResult() throws } public void testUpdateConnectorSyncJobError() throws Exception { + // Create sync job PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connectorOneId ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); + // Set sync job to in progress + updateConnectorSyncJobStatusWithoutStateMachineGuard(syncJobId, ConnectorSyncStatus.IN_PROGRESS); + + // Set sync job error UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); String errorInRequest = request.getError(); @@ -575,6 +683,18 @@ public void testUpdateConnectorSyncJobError_WithMissingSyncJobId_ExceptException ); } + public void 
testUpdateConnectorSyncJobError_WithStatusPending_ExpectStatusException() throws Exception { + // Create sync job + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOneId + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + + // Try to set error + assertThrows(ElasticsearchStatusException.class, () -> awaitUpdateConnectorSyncJob(syncJobId, "some error")); + } + public void testUpdateConnectorSyncJobIngestionStats() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connectorOneId @@ -733,6 +853,22 @@ private static void assertFieldsExceptSyncStatusAndCancellationRequestedAtDidNot ); } + private static void assertFieldsExceptSyncStatusAndCanceledAndCompletedTimestampsDidNotUpdate( + Map syncJobSourceBeforeUpdate, + Map syncJobSourceAfterUpdate + ) { + assertFieldsDidNotUpdateExceptFieldList( + syncJobSourceBeforeUpdate, + syncJobSourceAfterUpdate, + List.of( + ConnectorSyncJob.STATUS_FIELD, + ConnectorSyncJob.CANCELED_AT_FIELD, + ConnectorSyncJob.COMPLETED_AT_FIELD, + ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD + ) + ); + } + private static void assertFieldsExceptLastSeenDidNotUpdate( Map syncJobSourceBeforeUpdate, Map syncJobSourceAfterUpdate @@ -1006,4 +1142,14 @@ public void onFailure(Exception e) { return response; } + private String updateConnectorSyncJobStatusWithoutStateMachineGuard(String syncJobId, ConnectorSyncStatus syncStatus) throws Exception { + final UpdateRequest updateRequest = new UpdateRequest(ConnectorSyncJobIndexService.CONNECTOR_SYNC_JOB_INDEX_NAME, syncJobId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .doc(Map.of(ConnectorSyncJob.STATUS_FIELD.getPreferredName(), syncStatus)); + + ActionFuture index = client().update(updateRequest); + + // wait 10 seconds for connector 
creation + return index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getId(); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachineTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachineTests.java index b702a5ffa7eef..3e7bf80dcfb25 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachineTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobStateMachineTests.java @@ -86,4 +86,31 @@ public void testTransitionToSameState() { ); } } + + public void testAssertValidStateTransition_ExpectExceptionOnInvalidTransition() { + assertThrows( + ConnectorSyncJobInvalidStatusTransitionException.class, + () -> ConnectorSyncJobStateMachine.assertValidStateTransition(ConnectorSyncStatus.PENDING, ConnectorSyncStatus.CANCELING) + ); + } + + public void testAssertValidStateTransition_ExpectNoExceptionOnValidTransition() { + ConnectorSyncStatus prevStatus = ConnectorSyncStatus.PENDING; + ConnectorSyncStatus nextStatus = ConnectorSyncStatus.CANCELED; + + try { + ConnectorSyncJobStateMachine.assertValidStateTransition(prevStatus, nextStatus); + } catch (ConnectorSyncJobInvalidStatusTransitionException e) { + fail( + "Did not expect " + + ConnectorSyncJobInvalidStatusTransitionException.class.getSimpleName() + + " to be thrown for valid state transition [" + + prevStatus + + "] -> " + + "[" + + nextStatus + + "]." + ); + } + } } From 0b664dd4d42dc3756b2e7aa39cc1556e7d139f09 Mon Sep 17 00:00:00 2001 From: Serena Chou Date: Mon, 4 Mar 2024 11:31:00 +0100 Subject: [PATCH 086/107] Update README.asciidoc (#103597) * Update README.asciidoc updating the readme with the latest blurb from PMM and a reference to RAG + a few links to search labs content. 
* Tweak verbiage --------- Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- README.asciidoc | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/README.asciidoc b/README.asciidoc index a8b3704887e5b..dc27735d3c015 100644 --- a/README.asciidoc +++ b/README.asciidoc @@ -1,20 +1,24 @@ = Elasticsearch -Elasticsearch is a distributed, RESTful search engine optimized for speed and relevance on production-scale workloads. You can use Elasticsearch to perform real-time search over massive datasets for applications including: +Elasticsearch is a distributed search and analytics engine optimized for speed and relevance on production-scale workloads. Elasticsearch is the foundation of Elastic's open Stack platform. Search in near real-time over massive datasets, perform vector searches, integrate with generative AI applications, and much more. -* Vector search +Use cases enabled by Elasticsearch include: + +* https://www.elastic.co/search-labs/blog/articles/retrieval-augmented-generation-rag[Retrieval Augmented Generation (RAG)] +* https://www.elastic.co/search-labs/blog/categories/vector-search[Vector search] * Full-text search * Logs * Metrics * Application performance monitoring (APM) * Security logs - \... and more! To learn more about Elasticsearch's features and capabilities, see our https://www.elastic.co/products/elasticsearch[product page]. +To access information on https://www.elastic.co/search-labs/blog/categories/ml-research[machine learning innovations] and the latest https://www.elastic.co/search-labs/blog/categories/lucene[Lucene contributions from Elastic], more information can be found in https://www.elastic.co/search-labs[Search Labs]. 
+ [[get-started]] == Get started From cbb09d2676ecd0ed1ad49dd7a46f284c142015ad Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Mar 2024 10:45:45 +0000 Subject: [PATCH 087/107] AwaitsFix for #101008 --- .../test/java/org/elasticsearch/index/shard/IndexShardTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index b83334ec68fdd..97bf9f4e380fa 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -3800,6 +3800,7 @@ public void testIsSearchIdle() throws Exception { closeShards(primary); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101008") @TestIssueLogging( issueUrl = "https://github.com/elastic/elasticsearch/issues/101008", value = "org.elasticsearch.index.shard.IndexShard:TRACE" From 294fa4d037f52c3f91328f596cbd28fbe70ea3ba Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 4 Mar 2024 11:53:53 +0100 Subject: [PATCH 088/107] [Connectors API] Add more distinct test cases to PutConnectorSecretActionTests (#105809) --- .../action/PutConnectorSecretActionTests.java | 27 +++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java index b7c7453611bdf..7940017318336 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java @@ -25,11 +25,34 @@ public void 
testValidate_WhenConnectorSecretIdIsPresent_ExpectNoValidationError( } public void testValidate_WhenConnectorSecretIdIsEmpty_ExpectValidationError() { - PutConnectorSecretRequest requestWithMissingValue = new PutConnectorSecretRequest("", ""); - ActionRequestValidationException exception = requestWithMissingValue.validate(); + PutConnectorSecretRequest requestWithEmptyId = new PutConnectorSecretRequest("", randomAlphaOfLength(10)); + ActionRequestValidationException exception = requestWithEmptyId.validate(); assertThat(exception, notNullValue()); assertThat(exception.getMessage(), containsString("[id] cannot be [null] or [\"\"]")); + } + + public void testValidate_WhenConnectorSecretIdIsNull_ExpectValidationError() { + PutConnectorSecretRequest requestWithNullId = new PutConnectorSecretRequest(null, randomAlphaOfLength(10)); + ActionRequestValidationException exception = requestWithNullId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("[id] cannot be [null] or [\"\"]")); + } + + public void testValidate_WhenConnectorSecretValueIsEmpty_ExpectValidationError() { + PutConnectorSecretRequest requestWithEmptyValue = new PutConnectorSecretRequest(randomAlphaOfLength(10), ""); + ActionRequestValidationException exception = requestWithEmptyValue.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("[value] cannot be [null] or [\"\"]")); + } + + public void testValidate_WhenConnectorSecretValueIsNull_ExpectValidationError() { + PutConnectorSecretRequest requestWithEmptyValue = new PutConnectorSecretRequest(randomAlphaOfLength(10), null); + ActionRequestValidationException exception = requestWithEmptyValue.validate(); + + assertThat(exception, notNullValue()); assertThat(exception.getMessage(), containsString("[value] cannot be [null] or [\"\"]")); } } From 3b8177f2a1cea76ce63c18eb0bd10d0c22804caa Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 4 Mar 2024 
11:54:17 +0100 Subject: [PATCH 089/107] [Connectors API] Make validation error message consistent and add test case for null value. (#105810) --- .../action/PostConnectorSecretRequest.java | 16 ++++++++-------- .../action/PostConnectorSecretActionTests.java | 13 +++++++++++-- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java index 2e565dece7eca..90672f7ca7120 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretRequest.java @@ -21,15 +21,15 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; + public class PostConnectorSecretRequest extends ActionRequest { - public static final ParseField VALUE_FIELD = new ParseField("value"); + private static final ParseField VALUE_FIELD = new ParseField("value"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "post_secret_request", - args -> { - return new PostConnectorSecretRequest((String) args[0]); - } + args -> new PostConnectorSecretRequest((String) args[0]) ); static { @@ -75,13 +75,13 @@ public void writeTo(StreamOutput out) throws IOException { @Override public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (Strings.isNullOrEmpty(this.value)) { - ActionRequestValidationException exception = new ActionRequestValidationException(); - exception.addValidationError("value is missing"); - return exception; + validationException = addValidationError("[value] 
of the connector secret cannot be [null] or [\"\"]", validationException); } - return null; + return validationException; } @Override diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretActionTests.java index f1e1a670b2748..a11de91de739a 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretActionTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PostConnectorSecretActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.NULL_STRING; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -24,11 +25,19 @@ public void testValidate_WhenConnectorSecretIdIsPresent_ExpectNoValidationError( assertThat(exception, nullValue()); } - public void testValidate_WhenConnectorSecretIdIsEmpty_ExpectValidationError() { + public void testValidate_WhenConnectorSecretIdIsNull_ExpectValidationError() { + PostConnectorSecretRequest requestWithNullValue = new PostConnectorSecretRequest(NULL_STRING); + ActionRequestValidationException exception = requestWithNullValue.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("[value] of the connector secret cannot be [null] or [\"\"]")); + } + + public void testValidate_WhenConnectorSecretIdIsBlank_ExpectValidationError() { PostConnectorSecretRequest requestWithMissingValue = new PostConnectorSecretRequest(""); ActionRequestValidationException exception = 
requestWithMissingValue.validate(); assertThat(exception, notNullValue()); - assertThat(exception.getMessage(), containsString("value is missing")); + assertThat(exception.getMessage(), containsString("[value] of the connector secret cannot be [null] or [\"\"]")); } } From c97160a8574679f0bec0b1b8c585339f9f400ba9 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Mon, 4 Mar 2024 10:56:01 +0000 Subject: [PATCH 090/107] [ILM] Delete step deletes data stream with only one index (#105772) We seem to have a couple of checks to make sure we delete the data stream when the last index reaches the delete step however, these checks seem a bit contradictory. Namely, the first check makes use if `Index` equality (UUID included) and the second just checks the index name. So if a data stream with just one index (the write index) is restored from snapshot (different UUID) we would've failed the first index equality check and go through the second check `dataStream.getWriteIndex().getName().equals(indexName)` and fail the delete step (in a non-retryable way :( ) because we don't want to delete the write index of a data stream (but we really do if the data stream has only one index) This PR makes 2 changes: 1. use the index name equality everywhere in the step (we already looked up the index abstraction and the parent data stream, so we know for sure the managed index is part of the data stream) 2. do not throw exception when we got here via a write index that is NOT the last index in the data stream but report the exception so we keep retrying this step (i.e. 
this enables our users to simply execute a manual rollover and the index is deleted by ILM eventually on retry) --- docs/changelog/105772.yaml | 5 ++ .../xpack/core/ilm/DeleteStep.java | 8 ++- .../xpack/core/ilm/DeleteStepTests.java | 65 ++++++++++--------- .../xpack/ilm/TimeSeriesDataStreamsIT.java | 46 +++++++++++++ 4 files changed, 92 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/105772.yaml diff --git a/docs/changelog/105772.yaml b/docs/changelog/105772.yaml new file mode 100644 index 0000000000000..73680aa04e5ab --- /dev/null +++ b/docs/changelog/105772.yaml @@ -0,0 +1,5 @@ +pr: 105772 +summary: "[ILM] Delete step deletes data stream with only one index" +area: ILM+SLM +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java index 755e453790257..ba6b6f9366c61 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java @@ -41,7 +41,10 @@ public void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState cu if (dataStream != null) { assert dataStream.getWriteIndex() != null : dataStream.getName() + " has no write index"; - if (dataStream.getIndices().size() == 1 && dataStream.getIndices().get(0).equals(indexMetadata.getIndex())) { + + // using index name equality across this if/else branch as the UUID of the index might change via restoring a data stream + // with one index from snapshot + if (dataStream.getIndices().size() == 1 && dataStream.getWriteIndex().getName().equals(indexName)) { // This is the last index in the data stream, the entire stream // needs to be deleted, because we can't have an empty data stream DeleteDataStreamAction.Request deleteReq = new DeleteDataStreamAction.Request(new String[] { dataStream.getName() }); @@ -62,7 +65,8 @@ public void 
performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState cu policyName ); logger.debug(errorMessage); - throw new IllegalStateException(errorMessage); + listener.onFailure(new IllegalStateException(errorMessage)); + return; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java index 32e9148de067c..5851ebe2fb3c9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java @@ -21,7 +21,10 @@ import java.util.List; +import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; public class DeleteStepTests extends AbstractStepTestCase { @@ -76,7 +79,7 @@ public void testDeleted() throws Exception { assertEquals(indexMetadata.getIndex().getName(), request.indices()[0]); listener.onResponse(null); return null; - }).when(indicesClient).delete(Mockito.any(), Mockito.any()); + }).when(indicesClient).delete(any(), any()); DeleteStep step = createRandomInstance(); ClusterState clusterState = ClusterState.builder(emptyClusterState()) @@ -86,7 +89,7 @@ public void testDeleted() throws Exception { Mockito.verify(client, Mockito.only()).admin(); Mockito.verify(adminClient, Mockito.only()).indices(); - Mockito.verify(indicesClient, Mockito.only()).delete(Mockito.any(), Mockito.any()); + Mockito.verify(indicesClient, Mockito.only()).delete(any(), any()); } public void testExceptionThrown() { @@ -102,7 +105,7 @@ public void testExceptionThrown() { assertEquals(indexMetadata.getIndex().getName(), request.indices()[0]); listener.onFailure(exception); return null; - }).when(indicesClient).delete(Mockito.any(), Mockito.any()); + }).when(indicesClient).delete(any(), 
any()); DeleteStep step = createRandomInstance(); ClusterState clusterState = ClusterState.builder(emptyClusterState()) @@ -117,7 +120,13 @@ public void testExceptionThrown() { ); } - public void testPerformActionThrowsExceptionIfIndexIsTheDataStreamWriteIndex() { + public void testPerformActionCallsFailureListenerIfIndexIsTheDataStreamWriteIndex() { + doThrow( + new IllegalStateException( + "the client must not be called in this test as we should fail in the step validation phase before we call the delete API" + ) + ).when(indicesClient).delete(any(DeleteIndexRequest.class), anyActionListener()); + String policyName = "test-ilm-policy"; String dataStreamName = randomAlphaOfLength(10); @@ -149,31 +158,27 @@ public void testPerformActionThrowsExceptionIfIndexIsTheDataStreamWriteIndex() { .metadata(Metadata.builder().put(index1, false).put(sourceIndexMetadata, false).put(dataStream).build()) .build(); - IllegalStateException illegalStateException = expectThrows( - IllegalStateException.class, - () -> createRandomInstance().performDuringNoSnapshot(sourceIndexMetadata, clusterState, new ActionListener<>() { - @Override - public void onResponse(Void complete) { - fail("unexpected listener callback"); - } - - @Override - public void onFailure(Exception e) { - fail("unexpected listener callback"); - } - }) - ); - assertThat( - illegalStateException.getMessage(), - is( - "index [" - + sourceIndexMetadata.getIndex().getName() - + "] is the write index for data stream [" - + dataStreamName - + "]. stopping execution of lifecycle [test-ilm-policy] as a data stream's write index cannot be deleted. 
" - + "manually rolling over the index will resume the execution of the policy as the index will not be the " - + "data stream's write index anymore" - ) - ); + createRandomInstance().performDuringNoSnapshot(sourceIndexMetadata, clusterState, new ActionListener<>() { + @Override + public void onResponse(Void complete) { + fail("unexpected listener callback"); + } + + @Override + public void onFailure(Exception e) { + assertThat( + e.getMessage(), + is( + "index [" + + sourceIndexMetadata.getIndex().getName() + + "] is the write index for data stream [" + + dataStreamName + + "]. stopping execution of lifecycle [test-ilm-policy] as a data stream's write index cannot be deleted. " + + "manually rolling over the index will resume the execution of the policy as the index will not be the " + + "data stream's write index anymore" + ) + ); + } + }); } } diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java index cb4685a0564ed..95735ffbe8a87 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java @@ -11,12 +11,14 @@ import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ilm.CheckNotDataStreamWriteIndexStep; import org.elasticsearch.xpack.core.ilm.DeleteAction; +import org.elasticsearch.xpack.core.ilm.DeleteStep; import 
org.elasticsearch.xpack.core.ilm.ForceMergeAction; import org.elasticsearch.xpack.core.ilm.FreezeAction; import org.elasticsearch.xpack.core.ilm.PhaseCompleteStep; @@ -37,6 +39,7 @@ import static org.elasticsearch.xpack.TimeSeriesRestDriver.createNewSingletonPolicy; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createSnapshotRepo; import static org.elasticsearch.xpack.TimeSeriesRestDriver.explainIndex; +import static org.elasticsearch.xpack.TimeSeriesRestDriver.getBackingIndices; import static org.elasticsearch.xpack.TimeSeriesRestDriver.getOnlyIndexSettings; import static org.elasticsearch.xpack.TimeSeriesRestDriver.getStepKeyForIndex; import static org.elasticsearch.xpack.TimeSeriesRestDriver.getTemplate; @@ -45,6 +48,7 @@ import static org.elasticsearch.xpack.TimeSeriesRestDriver.waitAndGetShrinkIndexName; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; public class TimeSeriesDataStreamsIT extends ESRestTestCase { @@ -303,4 +307,46 @@ public void testDeleteOnlyIndexInDataStreamDeletesDataStream() throws Exception }); } + @SuppressWarnings("unchecked") + public void testDataStreamWithMultipleIndicesAndWriteIndexInDeletePhase() throws Exception { + createComposableTemplate(client(), template, dataStream + "*", new Template(null, null, null, null)); + indexDocument(client(), dataStream, true); + + createNewSingletonPolicy(client(), policyName, "delete", DeleteAction.NO_SNAPSHOT_DELETE); + // let's update the index template so the new write index (after rollover) is managed by an ILM policy that sents it to the + // delete step - note that we'll have here a data stream with generation 000001 not managed and the write index 000002 in the + // delete phase (the write index in this case, being not the only backing index must NOT be deleted). 
+ createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName)); + + client().performRequest(new Request("POST", dataStream + "/_rollover")); + indexDocument(client(), dataStream, true); + + String secondGenerationIndex = getBackingIndices(client(), dataStream).get(1); + assertBusy(() -> { + Request explainRequest = new Request("GET", "/_data_stream/" + dataStream); + Response response = client().performRequest(explainRequest); + Map responseMap; + try (InputStream is = response.getEntity().getContent()) { + responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); + } + + List dataStreams = (List) responseMap.get("data_streams"); + assertThat(dataStreams.size(), is(1)); + Map dataStream = (Map) dataStreams.get(0); + + List indices = (List) dataStream.get("indices"); + // no index should be deleted + assertThat(indices.size(), is(2)); + + Map explainIndex = explainIndex(client(), secondGenerationIndex); + assertThat(explainIndex.get("failed_step"), is(DeleteStep.NAME)); + assertThat((Integer) explainIndex.get("failed_step_retry_count"), is(greaterThan(1))); + }); + + // rolling the data stream again would see 000002 not be the write index anymore and should be deleted automatically + client().performRequest(new Request("POST", dataStream + "/_rollover")); + + assertBusy(() -> assertThat(indexExists(secondGenerationIndex), is(false))); + } + } From 9e5fbf6021aeb213c87bb1071db03eed920872b3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Mar 2024 10:56:45 +0000 Subject: [PATCH 091/107] Extract repository-resolution logic (#105760) We use the same logic to resolve repositories in multiple APIs, but today this is hidden in `TransportGetRepositoriesAction`. This commit moves it out to its own class and gives it its own test suite. 
--- .../get/TransportGetRepositoriesAction.java | 78 +--------------- .../get/TransportGetSnapshotsAction.java | 12 +-- .../repositories/ResolvedRepositories.java | 81 ++++++++++++++++ .../rest/action/cat/RestSnapshotAction.java | 4 +- .../ResolvedRepositoriesTests.java | 93 +++++++++++++++++++ .../AbstractSnapshotIntegTestCase.java | 4 +- 6 files changed, 187 insertions(+), 85 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/repositories/ResolvedRepositories.java create mode 100644 server/src/test/java/org/elasticsearch/repositories/ResolvedRepositoriesTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java index b31dde0f75613..bed02ef2cbc19 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java @@ -16,29 +16,20 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; -import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.repositories.ResolvedRepositories; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.ArrayList; -import java.util.LinkedHashSet; -import java.util.List; 
-import java.util.Set; - /** * Transport action for get repositories operation */ public class TransportGetRepositoriesAction extends TransportMasterNodeReadAction { - public static final String ALL_PATTERN = "_all"; - @Inject public TransportGetRepositoriesAction( TransportService transportService, @@ -60,11 +51,6 @@ public TransportGetRepositoriesAction( ); } - public static boolean isMatchAll(String[] patterns) { - return (patterns.length == 0) - || (patterns.length == 1 && (ALL_PATTERN.equalsIgnoreCase(patterns[0]) || Regex.isMatchAllPattern(patterns[0]))); - } - @Override protected ClusterBlockException checkBlock(GetRepositoriesRequest request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); @@ -77,69 +63,11 @@ protected void masterOperation( ClusterState state, final ActionListener listener ) { - RepositoriesResult result = getRepositories(state, request.repositories()); + final var result = ResolvedRepositories.resolve(state, request.repositories()); if (result.hasMissingRepositories()) { listener.onFailure(new RepositoryMissingException(String.join(", ", result.missing()))); } else { - listener.onResponse(new GetRepositoriesResponse(new RepositoriesMetadata(result.metadata))); - } - } - - /** - * Get repository metadata for given repository names from given cluster state. 
- * - * @param state Cluster state - * @param repoNames Repository names or patterns to get metadata for - * @return a result with the repository metadata that were found in the cluster state and the missing repositories - */ - public static RepositoriesResult getRepositories(ClusterState state, String[] repoNames) { - RepositoriesMetadata repositories = RepositoriesMetadata.get(state); - if (isMatchAll(repoNames)) { - return new RepositoriesResult(repositories.repositories()); - } - final List missingRepositories = new ArrayList<>(); - final List includePatterns = new ArrayList<>(); - final List excludePatterns = new ArrayList<>(); - boolean seenWildcard = false; - for (String repositoryOrPattern : repoNames) { - if (seenWildcard && repositoryOrPattern.length() > 1 && repositoryOrPattern.startsWith("-")) { - excludePatterns.add(repositoryOrPattern.substring(1)); - } else { - if (Regex.isSimpleMatchPattern(repositoryOrPattern)) { - seenWildcard = true; - } else { - if (repositories.repository(repositoryOrPattern) == null) { - missingRepositories.add(repositoryOrPattern); - } - } - includePatterns.add(repositoryOrPattern); - } - } - final String[] excludes = excludePatterns.toArray(Strings.EMPTY_ARRAY); - final Set repositoryListBuilder = new LinkedHashSet<>(); // to keep insertion order - for (String repositoryOrPattern : includePatterns) { - for (RepositoryMetadata repository : repositories.repositories()) { - if (repositoryListBuilder.contains(repository) == false - && Regex.simpleMatch(repositoryOrPattern, repository.name()) - && Regex.simpleMatch(excludes, repository.name()) == false) { - repositoryListBuilder.add(repository); - } - } - } - return new RepositoriesResult(List.copyOf(repositoryListBuilder), missingRepositories); - } - - /** - * A holder class that consists of the repository metadata and the names of the repositories that were not found in the cluster state. 
- */ - public record RepositoriesResult(List metadata, List missing) { - - RepositoriesResult(List repositoryMetadata) { - this(repositoryMetadata, List.of()); - } - - boolean hasMissingRepositories() { - return missing.isEmpty() == false; + listener.onResponse(new GetRepositoriesResponse(new RepositoriesMetadata(result.repositoryMetadata()))); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index ef4ebec8c2dfc..18db17b1449e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.repositories.get.TransportGetRepositoriesAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -33,6 +32,7 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.repositories.ResolvedRepositories; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; @@ -111,7 +111,7 @@ protected void masterOperation( new GetSnapshotsOperation( (CancellableTask) task, - TransportGetRepositoriesAction.getRepositories(state, request.repositories()), + ResolvedRepositories.resolve(state, request.repositories()), request.isSingleRepositoryRequest() == false, request.snapshots(), request.ignoreUnavailable(), @@ -172,7 +172,7 
@@ private class GetSnapshotsOperation { GetSnapshotsOperation( CancellableTask cancellableTask, - TransportGetRepositoriesAction.RepositoriesResult repositoriesResult, + ResolvedRepositories resolvedRepositories, boolean isMultiRepoRequest, String[] snapshots, boolean ignoreUnavailable, @@ -188,7 +188,7 @@ private class GetSnapshotsOperation { boolean indices ) { this.cancellableTask = cancellableTask; - this.repositories = repositoriesResult.metadata(); + this.repositories = resolvedRepositories.repositoryMetadata(); this.isMultiRepoRequest = isMultiRepoRequest; this.snapshots = snapshots; this.ignoreUnavailable = ignoreUnavailable; @@ -203,7 +203,7 @@ private class GetSnapshotsOperation { this.verbose = verbose; this.indices = indices; - for (final var missingRepo : repositoriesResult.missing()) { + for (final var missingRepo : resolvedRepositories.missing()) { failuresByRepository.put(missingRepo, new RepositoryMissingException(missingRepo)); } } @@ -326,7 +326,7 @@ private void loadSnapshotInfos( } final Set toResolve = new HashSet<>(); - if (TransportGetRepositoriesAction.isMatchAll(snapshots)) { + if (ResolvedRepositories.isMatchAll(snapshots)) { toResolve.addAll(allSnapshotIds.values()); } else { final List includePatterns = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/repositories/ResolvedRepositories.java b/server/src/main/java/org/elasticsearch/repositories/ResolvedRepositories.java new file mode 100644 index 0000000000000..ab4821ad942b0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/repositories/ResolvedRepositories.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.RepositoriesMetadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.regex.Regex; + +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +/** + * The result of calling {@link #resolve(ClusterState, String[])} to resolve a description of some snapshot repositories (from a path + * component of a request to the get-repositories or get-snapshots APIs) against the known repositories in the cluster state: the + * {@link RepositoryMetadata} for the extant repositories that match the description, together with a list of the parts of the description + * that failed to match any known repository. + * + * @param repositoryMetadata The {@link RepositoryMetadata} for the repositories that matched the description. + * @param missing The parts of the description which matched no repositories. 
+ */ +public record ResolvedRepositories(List repositoryMetadata, List missing) { + + public static final String ALL_PATTERN = "_all"; + + public static boolean isMatchAll(String[] patterns) { + return patterns.length == 0 + || (patterns.length == 1 && (ALL_PATTERN.equalsIgnoreCase(patterns[0]) || Regex.isMatchAllPattern(patterns[0]))); + } + + public static ResolvedRepositories resolve(ClusterState state, String[] patterns) { + final var repositories = RepositoriesMetadata.get(state); + if (isMatchAll(patterns)) { + return new ResolvedRepositories(repositories.repositories(), List.of()); + } + + final List missingRepositories = new ArrayList<>(); + final List includePatterns = new ArrayList<>(); + final List excludePatterns = new ArrayList<>(); + boolean seenWildcard = false; + for (final var pattern : patterns) { + if (seenWildcard && pattern.length() > 1 && pattern.startsWith("-")) { + excludePatterns.add(pattern.substring(1)); + } else { + if (Regex.isSimpleMatchPattern(pattern)) { + seenWildcard = true; + } else { + if (repositories.repository(pattern) == null) { + missingRepositories.add(pattern); + } + } + includePatterns.add(pattern); + } + } + final var excludes = excludePatterns.toArray(Strings.EMPTY_ARRAY); + final Set repositoryListBuilder = new LinkedHashSet<>(); // to keep insertion order + for (String repositoryOrPattern : includePatterns) { + for (RepositoryMetadata repository : repositories.repositories()) { + if (repositoryListBuilder.contains(repository) == false + && Regex.simpleMatch(repositoryOrPattern, repository.name()) + && Regex.simpleMatch(excludes, repository.name()) == false) { + repositoryListBuilder.add(repository); + } + } + } + return new ResolvedRepositories(List.copyOf(repositoryListBuilder), missingRepositories); + } + + public boolean hasMissingRepositories() { + return missing.isEmpty() == false; + } +} diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java index 3f13205aad6b4..9b4c6534a452f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.repositories.get.TransportGetRepositoriesAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.client.internal.node.NodeClient; @@ -17,6 +16,7 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.repositories.ResolvedRepositories; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.Scope; @@ -50,7 +50,7 @@ public String getName() { @Override protected RestChannelConsumer doCatRequest(final RestRequest request, NodeClient client) { - final String[] matchAll = { TransportGetRepositoriesAction.ALL_PATTERN }; + final String[] matchAll = { ResolvedRepositories.ALL_PATTERN }; GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest().repositories(request.paramAsStringArray("repository", matchAll)) .snapshots(matchAll); diff --git a/server/src/test/java/org/elasticsearch/repositories/ResolvedRepositoriesTests.java b/server/src/test/java/org/elasticsearch/repositories/ResolvedRepositoriesTests.java new file mode 100644 index 0000000000000..04859d2847522 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/repositories/ResolvedRepositoriesTests.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.RepositoriesMetadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ResolvedRepositoriesTests extends ESTestCase { + + public void testAll() { + runMatchAllTest(); + runMatchAllTest("*"); + runMatchAllTest("_all"); + } + + private static void runMatchAllTest(String... patterns) { + final var state = clusterStateWithRepositories(randomList(1, 4, ESTestCase::randomIdentifier).toArray(String[]::new)); + final var result = getRepositories(state, patterns); + assertEquals(RepositoriesMetadata.get(state).repositories(), result.repositoryMetadata()); + assertThat(result.missing(), Matchers.empty()); + assertFalse(result.hasMissingRepositories()); + } + + public void testMatchingName() { + final var state = clusterStateWithRepositories(randomList(1, 4, ESTestCase::randomIdentifier).toArray(String[]::new)); + final var name = randomFrom(RepositoriesMetadata.get(state).repositories()).name(); + final var result = getRepositories(state, name); + assertEquals(List.of(RepositoriesMetadata.get(state).repository(name)), result.repositoryMetadata()); + assertThat(result.missing(), Matchers.empty()); + assertFalse(result.hasMissingRepositories()); + } + + public void testMismatchingName() { + final var state = clusterStateWithRepositories(randomList(1, 4, ESTestCase::randomIdentifier).toArray(String[]::new)); + final var notAName = 
randomValueOtherThanMany( + n -> RepositoriesMetadata.get(state).repositories().stream().anyMatch(m -> n.equals(m.name())), + ESTestCase::randomIdentifier + ); + final var result = getRepositories(state, notAName); + assertEquals(List.of(), result.repositoryMetadata()); + assertEquals(List.of(notAName), result.missing()); + assertTrue(result.hasMissingRepositories()); + } + + public void testWildcards() { + final var state = clusterStateWithRepositories("test-match-1", "test-match-2", "test-exclude", "other-repo"); + + runWildcardTest(state, List.of("test-match-1", "test-match-2", "test-exclude"), "test-*"); + runWildcardTest(state, List.of("test-match-1", "test-match-2"), "test-*1", "test-*2"); + runWildcardTest(state, List.of("test-match-2", "test-match-1"), "test-*2", "test-*1"); + runWildcardTest(state, List.of("test-match-1", "test-match-2"), "test-*", "-*-exclude"); + runWildcardTest(state, List.of(), "no-*-repositories"); + runWildcardTest(state, List.of("test-match-1", "test-match-2", "other-repo"), "test-*", "-*-exclude", "other-repo"); + runWildcardTest(state, List.of("other-repo", "test-match-1", "test-match-2"), "other-repo", "test-*", "-*-exclude"); + } + + private static void runWildcardTest(ClusterState clusterState, List expectedNames, String... patterns) { + final var result = getRepositories(clusterState, patterns); + final var description = Strings.format("%s should yield %s", Arrays.toString(patterns), expectedNames); + assertFalse(description, result.hasMissingRepositories()); + assertEquals(description, expectedNames, result.repositoryMetadata().stream().map(RepositoryMetadata::name).toList()); + } + + private static ResolvedRepositories getRepositories(ClusterState clusterState, String... patterns) { + return ResolvedRepositories.resolve(clusterState, patterns); + } + + private static ClusterState clusterStateWithRepositories(String... 
repoNames) { + final var repositories = new ArrayList(repoNames.length); + for (final var repoName : repoNames) { + repositories.add(new RepositoryMetadata(repoName, "test", Settings.EMPTY)); + } + return ClusterState.EMPTY_STATE.copyAndUpdateMetadata( + b -> b.putCustom(RepositoriesMetadata.TYPE, new RepositoriesMetadata(repositories)) + ); + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 3744011b5b9f6..46b18887241dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.action.admin.cluster.repositories.get.TransportGetRepositoriesAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -42,6 +41,7 @@ import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.ResolvedRepositories; import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; @@ -799,7 +799,7 @@ public static Map randomUserMetadata() { } public static String[] matchAllPattern() { - return randomBoolean() ? new String[] { "*" } : new String[] { TransportGetRepositoriesAction.ALL_PATTERN }; + return randomBoolean() ? 
new String[] { "*" } : new String[] { ResolvedRepositories.ALL_PATTERN }; } public RepositoryMetadata getRepositoryMetadata(String repo) { From 3addbed878cff2a108daa6f07d14b9ee56df7dcb Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Mar 2024 10:57:37 +0000 Subject: [PATCH 092/107] Restrict scope of `GetSnapshotInfoContext` (#105721) `GetSnapshotInfoContext` is kind of a strange thing to expose in the `Repository` interface. It's a concrete class with fairly specific semantics which are tied to the `BlobStoreRepository` implementation, and yet callers must always construct one before calling `getSnapshotInfo`. In practice all callers call it like this: repository.getSnapshotInfo(new GetSnapshotInfoContext(...)); This commit simplifies these calls to just pass the arguments directly, and moves `GetSnapshotInfoContext` into the `o.e.r.blobstore` package where it becomes package-private. --- .../get/TransportGetSnapshotsAction.java | 19 ++--- .../TransportSnapshotsStatusAction.java | 73 +++++++++---------- .../repositories/FilterRepository.java | 13 +++- .../repositories/InvalidRepository.java | 13 +++- .../repositories/Repository.java | 25 +++++-- .../repositories/UnknownTypeRepository.java | 13 +++- .../blobstore/BlobStoreRepository.java | 27 +++++-- .../GetSnapshotInfoContext.java | 17 +++-- .../RepositoriesServiceTests.java | 11 ++- .../index/shard/RestoreOnlyRepository.java | 14 +++- .../blobstore/BlobStoreTestUtil.java | 47 +++++------- .../xpack/ccr/repository/CcrRepository.java | 33 ++++++--- ...TransportSLMGetExpiredSnapshotsAction.java | 19 ++--- ...portSLMGetExpiredSnapshotsActionTests.java | 29 +++++--- 14 files changed, 208 insertions(+), 145 deletions(-) rename server/src/main/java/org/elasticsearch/repositories/{ => blobstore}/GetSnapshotInfoContext.java (92%) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 18db17b1449e8..9b74fb77c44b3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.Nullable; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; @@ -438,19 +437,11 @@ private void snapshots(String repositoryName, Collection snapshotIds // only need to synchronize accesses related to reading SnapshotInfo from the repo final List syncSnapshots = Collections.synchronizedList(snapshots); - repository.getSnapshotInfo( - new GetSnapshotInfoContext( - snapshotIdsToIterate, - ignoreUnavailable == false, - cancellableTask::isCancelled, - (context, snapshotInfo) -> { - if (predicates.test(snapshotInfo)) { - syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); - } - }, - listeners.acquire() - ) - ); + repository.getSnapshotInfo(snapshotIdsToIterate, ignoreUnavailable == false, cancellableTask::isCancelled, snapshotInfo -> { + if (predicates.test(snapshotInfo)) { + syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + } + }, listeners.acquire()); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 4be6c6af3d7db..973ae9098047f 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; @@ -318,45 +317,39 @@ private void loadRepositoryData( delegate.onResponse(new SnapshotsStatusResponse(Collections.unmodifiableList(builder))); } else { final List threadSafeBuilder = Collections.synchronizedList(builder); - repositoriesService.repository(repositoryName) - .getSnapshotInfo(new GetSnapshotInfoContext(snapshotIdsToLoad, true, task::isCancelled, (context, snapshotInfo) -> { - List shardStatusBuilder = new ArrayList<>(); - final Map shardStatuses; - try { - shardStatuses = snapshotShards(repositoryName, repositoryData, task, snapshotInfo); - } catch (Exception e) { - // stops all further fetches of snapshotInfo since context is fail-fast - context.onFailure(e); - return; - } - for (final var shardStatus : shardStatuses.entrySet()) { - IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardStatus.getValue(); - shardStatusBuilder.add(new SnapshotIndexShardStatus(shardStatus.getKey(), lastSnapshotStatus)); - } - final SnapshotsInProgress.State state = switch (snapshotInfo.state()) { - case FAILED -> SnapshotsInProgress.State.FAILED; - case SUCCESS, PARTIAL -> - // Translating both PARTIAL and SUCCESS to SUCCESS for now - // TODO: add the differentiation on the metadata level in the next major release - SnapshotsInProgress.State.SUCCESS; - default -> throw new IllegalArgumentException("Unexpected snapshot state " + 
snapshotInfo.state()); - }; - final long startTime = snapshotInfo.startTime(); - final long endTime = snapshotInfo.endTime(); - assert endTime >= startTime || (endTime == 0L && snapshotInfo.state().completed() == false) - : "Inconsistent timestamps found in SnapshotInfo [" + snapshotInfo + "]"; - threadSafeBuilder.add( - new SnapshotStatus( - new Snapshot(repositoryName, snapshotInfo.snapshotId()), - state, - Collections.unmodifiableList(shardStatusBuilder), - snapshotInfo.includeGlobalState(), - startTime, - // Use current time to calculate overall runtime for in-progress snapshots that have endTime == 0 - (endTime == 0 ? threadPool.absoluteTimeInMillis() : endTime) - startTime - ) - ); - }, delegate.map(v -> new SnapshotsStatusResponse(List.copyOf(threadSafeBuilder))))); + repositoriesService.repository(repositoryName).getSnapshotInfo(snapshotIdsToLoad, true, task::isCancelled, snapshotInfo -> { + List shardStatusBuilder = new ArrayList<>(); + final Map shardStatuses; + shardStatuses = snapshotShards(repositoryName, repositoryData, task, snapshotInfo); + // an exception here stops further fetches of snapshotInfo since context is fail-fast + for (final var shardStatus : shardStatuses.entrySet()) { + IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardStatus.getValue(); + shardStatusBuilder.add(new SnapshotIndexShardStatus(shardStatus.getKey(), lastSnapshotStatus)); + } + final SnapshotsInProgress.State state = switch (snapshotInfo.state()) { + case FAILED -> SnapshotsInProgress.State.FAILED; + case SUCCESS, PARTIAL -> + // Translating both PARTIAL and SUCCESS to SUCCESS for now + // TODO: add the differentiation on the metadata level in the next major release + SnapshotsInProgress.State.SUCCESS; + default -> throw new IllegalArgumentException("Unexpected snapshot state " + snapshotInfo.state()); + }; + final long startTime = snapshotInfo.startTime(); + final long endTime = snapshotInfo.endTime(); + assert endTime >= startTime || (endTime == 0L && 
snapshotInfo.state().completed() == false) + : "Inconsistent timestamps found in SnapshotInfo [" + snapshotInfo + "]"; + threadSafeBuilder.add( + new SnapshotStatus( + new Snapshot(repositoryName, snapshotInfo.snapshotId()), + state, + Collections.unmodifiableList(shardStatusBuilder), + snapshotInfo.includeGlobalState(), + startTime, + // Use current time to calculate overall runtime for in-progress snapshots that have endTime == 0 + (endTime == 0 ? threadPool.absoluteTimeInMillis() : endTime) - startTime + ) + ); + }, delegate.map(v -> new SnapshotsStatusResponse(List.copyOf(threadSafeBuilder)))); } })); } diff --git a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java index c88bbcfa91b98..37f1850c1fb2d 100644 --- a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; @@ -23,11 +24,13 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; import java.util.Collection; import java.util.Set; import java.util.concurrent.Executor; +import java.util.function.BooleanSupplier; public class FilterRepository implements Repository { @@ -47,8 +50,14 @@ public RepositoryMetadata getMetadata() { } @Override - public void getSnapshotInfo(GetSnapshotInfoContext context) { - in.getSnapshotInfo(context); + public void 
getSnapshotInfo( + Collection snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ) { + in.getSnapshotInfo(snapshotIds, abortOnFailure, isCancelled, consumer, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java index 6bd967d84c89b..948ae747e11a9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; @@ -22,10 +23,12 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; import java.util.Collection; import java.util.concurrent.Executor; +import java.util.function.BooleanSupplier; /** * Represents a repository that exists in the cluster state but could not be instantiated on a node, typically due to invalid configuration. 
@@ -54,8 +57,14 @@ public RepositoryMetadata getMetadata() { } @Override - public void getSnapshotInfo(GetSnapshotInfoContext context) { - throw createCreationException(); + public void getSnapshotInfo( + Collection snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ) { + listener.onFailure(createCreationException()); } @Override diff --git a/server/src/main/java/org/elasticsearch/repositories/Repository.java b/server/src/main/java/org/elasticsearch/repositories/Repository.java index 5782dedf3cfbc..a90b0a217285c 100644 --- a/server/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/server/src/main/java/org/elasticsearch/repositories/Repository.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; @@ -31,6 +32,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.Executor; +import java.util.function.BooleanSupplier; import java.util.function.Function; /** @@ -70,11 +72,24 @@ default Repository create(RepositoryMetadata metadata, Function snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ); /** * Reads a single snapshot description from the repository @@ -83,7 +98,7 @@ default Repository create(RepositoryMetadata metadata, Function listener) { - getSnapshotInfo(new GetSnapshotInfoContext(List.of(snapshotId), true, () -> false, (context, snapshotInfo) -> { + getSnapshotInfo(List.of(snapshotId), true, () -> false, snapshotInfo -> { assert Repository.assertSnapshotMetaThread(); listener.onResponse(snapshotInfo); }, new ActionListener<>() { @@ -96,7 +111,7 @@ public void 
onResponse(Void o) { public void onFailure(Exception e) { listener.onFailure(e); } - })); + }); } /** diff --git a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java index 30f167d8c5cf6..7821c865e166c 100644 --- a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; @@ -22,10 +23,12 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; import java.util.Collection; import java.util.concurrent.Executor; +import java.util.function.BooleanSupplier; /** * This class represents a repository that could not be initialized due to unknown type. 
@@ -52,8 +55,14 @@ public RepositoryMetadata getMetadata() { } @Override - public void getSnapshotInfo(GetSnapshotInfoContext context) { - throw createUnknownTypeException(); + public void getSnapshotInfo( + Collection snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ) { + listener.onFailure(createUnknownTypeException()); } @Override diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index b8b0498d95125..52cfa2fd5275f 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -97,7 +97,6 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.repositories.FinalizeSnapshotContext; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.IndexMetaDataGenerations; import org.elasticsearch.repositories.RepositoriesService; @@ -151,6 +150,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BooleanSupplier; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; @@ -1778,7 +1778,20 @@ public void onAfter() { } @Override - public void getSnapshotInfo(GetSnapshotInfoContext context) { + public void getSnapshotInfo( + Collection snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ) { + final var context = new GetSnapshotInfoContext(snapshotIds, abortOnFailure, isCancelled, (ctx, sni) -> { + try { + 
consumer.accept(sni); + } catch (Exception e) { + ctx.onFailure(e); + } + }, listener); // put snapshot info downloads into a task queue instead of pushing them all into the queue to not completely monopolize the // snapshot meta pool for a single request final int workers = Math.min(threadPool.info(ThreadPool.Names.SNAPSHOT_META).getMax(), context.snapshotIds().size()); @@ -2617,9 +2630,11 @@ public String toString() { if (snapshotIdsWithMissingDetails.isEmpty() == false) { final Map extraDetailsMap = new ConcurrentHashMap<>(); getSnapshotInfo( - new GetSnapshotInfoContext(snapshotIdsWithMissingDetails, false, () -> false, (context, snapshotInfo) -> { - extraDetailsMap.put(snapshotInfo.snapshotId(), SnapshotDetails.fromSnapshotInfo(snapshotInfo)); - }, ActionListener.runAfter(new ActionListener<>() { + snapshotIdsWithMissingDetails, + false, + () -> false, + snapshotInfo -> extraDetailsMap.put(snapshotInfo.snapshotId(), SnapshotDetails.fromSnapshotInfo(snapshotInfo)), + ActionListener.runAfter(new ActionListener<>() { @Override public void onResponse(Void aVoid) { logger.info( @@ -2636,7 +2651,7 @@ public void onResponse(Void aVoid) { public void onFailure(Exception e) { logger.warn("Failure when trying to load missing details from snapshot metadata", e); } - }, () -> filterRepositoryDataStep.onResponse(repositoryData.withExtraDetails(extraDetailsMap)))) + }, () -> filterRepositoryDataStep.onResponse(repositoryData.withExtraDetails(extraDetailsMap))) ); } else { filterRepositoryDataStep.onResponse(repositoryData); diff --git a/server/src/main/java/org/elasticsearch/repositories/GetSnapshotInfoContext.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/GetSnapshotInfoContext.java similarity index 92% rename from server/src/main/java/org/elasticsearch/repositories/GetSnapshotInfoContext.java rename to server/src/main/java/org/elasticsearch/repositories/blobstore/GetSnapshotInfoContext.java index ec8777e71ba9b..96782bca31a15 100644 --- 
a/server/src/main/java/org/elasticsearch/repositories/GetSnapshotInfoContext.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/GetSnapshotInfoContext.java @@ -5,12 +5,13 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -package org.elasticsearch.repositories; +package org.elasticsearch.repositories.blobstore; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.repositories.Repository; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.threadpool.ThreadPool; @@ -21,9 +22,9 @@ import java.util.function.BooleanSupplier; /** - * Describes the context of fetching one or more {@link SnapshotInfo} via {@link Repository#getSnapshotInfo(GetSnapshotInfoContext)}. + * Describes the context of fetching one or more {@link SnapshotInfo} via {@link Repository#getSnapshotInfo}. 
*/ -public final class GetSnapshotInfoContext implements ActionListener { +final class GetSnapshotInfoContext implements ActionListener { private static final Logger logger = LogManager.getLogger(GetSnapshotInfoContext.class); @@ -59,7 +60,7 @@ public final class GetSnapshotInfoContext implements ActionListener snapshotIds, boolean abortOnFailure, BooleanSupplier isCancelled, @@ -77,28 +78,28 @@ public GetSnapshotInfoContext( this.doneListener = listener; } - public List snapshotIds() { + List snapshotIds() { return snapshotIds; } /** * @return true if fetching {@link SnapshotInfo} should be stopped after encountering any exception */ - public boolean abortOnFailure() { + boolean abortOnFailure() { return abortOnFailure; } /** * @return true if fetching {@link SnapshotInfo} has been cancelled */ - public boolean isCancelled() { + boolean isCancelled() { return isCancelled.getAsBoolean(); } /** * @return true if fetching {@link SnapshotInfo} is either complete or should be stopped because of an error */ - public boolean done() { + boolean done() { return counter.isCountedDown(); } diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index 45e4bb09c1616..5a736b4e1e9dd 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; @@ -39,6 +40,7 @@ import org.elasticsearch.repositories.blobstore.MeteredBlobStoreRepository; import 
org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -51,6 +53,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Executor; +import java.util.function.BooleanSupplier; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isA; @@ -332,7 +335,13 @@ public RepositoryMetadata getMetadata() { } @Override - public void getSnapshotInfo(GetSnapshotInfoContext context) { + public void getSnapshotInfo( + Collection snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ) { throw new UnsupportedOperationException(); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java index 181b6c82379ed..26e887338158d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java @@ -14,10 +14,10 @@ import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.repositories.FinalizeSnapshotContext; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.IndexMetaDataGenerations; import org.elasticsearch.repositories.Repository; @@ -29,10 +29,12 @@ import 
org.elasticsearch.repositories.SnapshotShardContext; import org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; import java.util.Collection; import java.util.Collections; import java.util.concurrent.Executor; +import java.util.function.BooleanSupplier; import static java.util.Collections.emptyList; import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; @@ -61,8 +63,14 @@ public RepositoryMetadata getMetadata() { } @Override - public void getSnapshotInfo(GetSnapshotInfoContext context) { - throw new UnsupportedOperationException(); + public void getSnapshotInfo( + Collection snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ) { + listener.onFailure(new UnsupportedOperationException()); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index 79e4a8da713c5..d31bd16b07fcc 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.ShardGeneration; @@ -254,34 +253,26 @@ private static void assertSnapshotUUIDs( } // Assert that for each snapshot, the relevant metadata was written to index and shard folders final List snapshotInfos = Collections.synchronizedList(new 
ArrayList<>()); - repository.getSnapshotInfo( - new GetSnapshotInfoContext( - List.copyOf(snapshotIds), - true, - () -> false, - (ctx, sni) -> snapshotInfos.add(sni), - new ActionListener<>() { - @Override - public void onResponse(Void unused) { - try { - assertSnapshotInfosConsistency(repository, repositoryData, indices, snapshotInfos); - } catch (Exception e) { - listener.onResponse(new AssertionError(e)); - return; - } catch (AssertionError e) { - listener.onResponse(e); - return; - } - listener.onResponse(null); - } - - @Override - public void onFailure(Exception e) { - listener.onResponse(new AssertionError(e)); - } + repository.getSnapshotInfo(List.copyOf(snapshotIds), true, () -> false, snapshotInfos::add, new ActionListener<>() { + @Override + public void onResponse(Void unused) { + try { + assertSnapshotInfosConsistency(repository, repositoryData, indices, snapshotInfos); + } catch (Exception e) { + listener.onResponse(new AssertionError(e)); + return; + } catch (AssertionError e) { + listener.onResponse(e); + return; } - ) - ); + listener.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + listener.onResponse(new AssertionError(e)); + } + }); } private static void assertSnapshotInfosConsistency( diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index c13e513ef5164..2702a2e28546c 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; import 
org.elasticsearch.core.TimeValue; @@ -64,7 +65,6 @@ import org.elasticsearch.indices.recovery.MultiFileWriter; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.repositories.FinalizeSnapshotContext; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.IndexMetaDataGenerations; import org.elasticsearch.repositories.Repository; @@ -106,6 +106,7 @@ import java.util.Optional; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; import java.util.function.LongConsumer; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -183,28 +184,36 @@ private RemoteClusterClient getRemoteClusterClient() { } @Override - public void getSnapshotInfo(GetSnapshotInfoContext context) { - final List snapshotIds = context.snapshotIds(); + public void getSnapshotInfo( + Collection snapshotIds, + boolean abortOnFailure, + BooleanSupplier isCancelled, + CheckedConsumer consumer, + ActionListener listener + ) { assert snapshotIds.size() == 1 && SNAPSHOT_ID.equals(snapshotIds.iterator().next()) : "RemoteClusterRepository only supports " + SNAPSHOT_ID + " as the SnapshotId but saw " + snapshotIds; try { csDeduplicator.execute( - new ThreadedActionListener<>(threadPool.executor(ThreadPool.Names.SNAPSHOT_META), context.map(response -> { + new ThreadedActionListener<>(threadPool.executor(ThreadPool.Names.SNAPSHOT_META), listener.map(response -> { Metadata responseMetadata = response.metadata(); Map indicesMap = responseMetadata.indices(); - return new SnapshotInfo( - new Snapshot(this.metadata.name(), SNAPSHOT_ID), - List.copyOf(indicesMap.keySet()), - List.copyOf(responseMetadata.dataStreams().keySet()), - List.of(), - response.getNodes().getMaxDataNodeCompatibleIndexVersion(), - SnapshotState.SUCCESS + consumer.accept( + new SnapshotInfo( + new Snapshot(this.metadata.name(), 
SNAPSHOT_ID), + List.copyOf(indicesMap.keySet()), + List.copyOf(responseMetadata.dataStreams().keySet()), + List.of(), + response.getNodes().getMaxDataNodeCompatibleIndexVersion(), + SnapshotState.SUCCESS + ) ); + return null; })) ); } catch (Exception e) { assert false : e; - context.onFailure(e); + listener.onFailure(e); } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java index b1ec8f3a28f1b..cf3a114fc5803 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Tuple; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; @@ -194,16 +193,14 @@ static void getSnapshotDetailsByPolicy( snapshotsWithMissingDetails ); repository.getSnapshotInfo( - new GetSnapshotInfoContext( - snapshotsWithMissingDetails, - false, - () -> false, - (ignored, snapshotInfo) -> snapshotDetailsByPolicy.add( - snapshotInfo.snapshotId(), - RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshotInfo) - ), - new ThreadedActionListener<>(executor, listener.map(ignored -> snapshotDetailsByPolicy)) - ) + snapshotsWithMissingDetails, + false, + () -> false, + snapshotInfo -> snapshotDetailsByPolicy.add( + snapshotInfo.snapshotId(), + RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshotInfo) + ), + new ThreadedActionListener<>(executor, listener.map(ignored -> snapshotDetailsByPolicy)) ); } } diff --git 
a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsActionTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsActionTests.java index 1a49ad114f33f..573edc6e517bf 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsActionTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsActionTests.java @@ -16,10 +16,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; @@ -28,7 +28,6 @@ import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; -import org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.tasks.Task; @@ -39,6 +38,7 @@ import org.elasticsearch.xpack.core.slm.SnapshotLifecyclePolicy; import org.elasticsearch.xpack.core.slm.SnapshotRetentionConfiguration; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -51,6 +51,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.oneOf; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyString; import static 
org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -308,20 +309,26 @@ private static Repository createMockRepository(ThreadPool threadPool, List { - final GetSnapshotInfoContext getSnapshotInfoContext = invocation.getArgument(0); - final Set snapshotIds = new HashSet<>(getSnapshotInfoContext.snapshotIds()); + final Collection snapshotIdCollection = invocation.getArgument(0); + assertFalse("should not abort on failure", invocation.getArgument(1)); + final CheckedConsumer consumer = invocation.getArgument(3); + final ActionListener listener = invocation.getArgument(4); + + final Set snapshotIds = new HashSet<>(snapshotIdCollection); for (SnapshotInfo snapshotInfo : snapshotInfos) { if (snapshotIds.remove(snapshotInfo.snapshotId())) { - threadPool.generic().execute(ActionRunnable.supply(getSnapshotInfoContext, () -> snapshotInfo)); + threadPool.generic().execute(() -> { + try { + consumer.accept(snapshotInfo); + } catch (Exception e) { + fail(e); + } + }); } } - for (SnapshotId snapshotId : snapshotIds) { - threadPool.generic().execute(ActionRunnable.supply(getSnapshotInfoContext, () -> { - throw new SnapshotMissingException(REPO_NAME, snapshotId, null); - })); - } + listener.onResponse(null); return null; - }).when(repository).getSnapshotInfo(any()); + }).when(repository).getSnapshotInfo(any(), anyBoolean(), any(), any(), any()); doAnswer(invocation -> new RepositoryMetadata(REPO_NAME, "test", Settings.EMPTY)).when(repository).getMetadata(); From 35ff28025185f42e0c2b0a217151fb461e1eb7a1 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Mar 2024 11:59:39 +0000 Subject: [PATCH 093/107] Simplify TransportGetSnapshotsAction#sortSnapshots (#105895) No need to copy all the matching snapshots into a separate list before applying the size limit, we can allocate the final list straight away. 
--- .../get/TransportGetSnapshotsAction.java | 46 +++++++++++-------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 9b74fb77c44b3..f7b5fec8a2dd5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -510,7 +510,27 @@ private SnapshotsInRepo sortSnapshotsWithNoOffsetOrLimit(List snap return sortSnapshots(snapshotInfos.stream(), snapshotInfos.size(), 0, GetSnapshotsRequest.NO_LIMIT); } - private SnapshotsInRepo sortSnapshots(Stream infos, int totalCount, int offset, int size) { + private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, int totalCount, int offset, int size) { + final var resultsStream = snapshotInfoStream.filter(buildAfterPredicate()).sorted(buildComparator()).skip(offset); + if (size == GetSnapshotsRequest.NO_LIMIT) { + return new SnapshotsInRepo(resultsStream.toList(), totalCount, 0); + } else { + final var allocateSize = Math.min(size, 1000); // ignore excessively-large sizes in request params + final var results = new ArrayList(allocateSize); + var remaining = 0; + for (var iterator = resultsStream.iterator(); iterator.hasNext();) { + final var snapshotInfo = iterator.next(); + if (results.size() < size) { + results.add(snapshotInfo); + } else { + remaining += 1; + } + } + return new SnapshotsInRepo(results, totalCount, remaining); + } + } + + private Comparator buildComparator() { final Comparator comparator = switch (sortBy) { case START_TIME -> BY_START_TIME; case NAME -> BY_NAME; @@ -520,26 +540,16 @@ private SnapshotsInRepo sortSnapshots(Stream infos, int totalCount case FAILED_SHARDS -> BY_FAILED_SHARDS_COUNT; case 
REPOSITORY -> BY_REPOSITORY; }; - - if (after != null) { - assert offset == 0 : "can't combine after and offset but saw [" + after + "] and offset [" + offset + "]"; - infos = infos.filter(buildAfterPredicate()); - } - infos = infos.sorted(order == SortOrder.DESC ? comparator.reversed() : comparator).skip(offset); - final List allSnapshots = infos.toList(); - final List snapshots; - if (size != GetSnapshotsRequest.NO_LIMIT) { - snapshots = allSnapshots.stream().limit(size + 1).toList(); - } else { - snapshots = allSnapshots; - } - final List resultSet = size != GetSnapshotsRequest.NO_LIMIT && size < snapshots.size() - ? snapshots.subList(0, size) - : snapshots; - return new SnapshotsInRepo(resultSet, totalCount, allSnapshots.size() - resultSet.size()); + return order == SortOrder.DESC ? comparator.reversed() : comparator; } private Predicate buildAfterPredicate() { + if (after == null) { + // TODO use constant when https://github.com/elastic/elasticsearch/pull/105881 merged + return snapshotInfo -> true; + } + assert offset == 0 : "can't combine after and offset but saw [" + after + "] and offset [" + offset + "]"; + final String snapshotName = after.snapshotName(); final String repoName = after.repoName(); final String value = after.value(); From 7ddf51918033e73a3a183ff23dcbc097b09d06b8 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Mar 2024 13:08:19 +0100 Subject: [PATCH 094/107] Cleanup duplicate code in DenseVectorFieldMapper (#105887) Cleaning up some obvious duplication and dead abstract method. 
--- .../vectors/DenseVectorFieldMapper.java | 98 ++++++------------- 1 file changed, 31 insertions(+), 67 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 598a6383bfdaa..47efa0ca49771 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -250,8 +250,7 @@ public void readAndWriteValue(ByteBuffer byteBuffer, XContentBuilder b) throws I b.value(byteBuffer.get()); } - @Override - KnnByteVectorField createKnnVectorField(String name, byte[] vector, VectorSimilarityFunction function) { + private KnnByteVectorField createKnnVectorField(String name, byte[] vector, VectorSimilarityFunction function) { if (vector == null) { throw new IllegalArgumentException("vector value must not be null"); } @@ -261,11 +260,6 @@ KnnByteVectorField createKnnVectorField(String name, byte[] vector, VectorSimila return new KnnByteVectorField(name, vector, denseVectorFieldType); } - @Override - KnnFloatVectorField createKnnVectorField(String name, float[] vector, VectorSimilarityFunction function) { - throw new IllegalArgumentException("cannot create a float vector field from byte"); - } - @Override IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldType, FieldDataContext fieldDataContext) { return new VectorIndexFieldData.Builder( @@ -452,8 +446,7 @@ public void readAndWriteValue(ByteBuffer byteBuffer, XContentBuilder b) throws I b.value(byteBuffer.getFloat()); } - @Override - KnnFloatVectorField createKnnVectorField(String name, float[] vector, VectorSimilarityFunction function) { + private KnnFloatVectorField createKnnVectorField(String name, float[] vector, VectorSimilarityFunction function) { if (vector == null) { throw new IllegalArgumentException("vector value 
must not be null"); } @@ -463,11 +456,6 @@ KnnFloatVectorField createKnnVectorField(String name, float[] vector, VectorSimi return new KnnFloatVectorField(name, vector, denseVectorFieldType); } - @Override - KnnByteVectorField createKnnVectorField(String name, byte[] vector, VectorSimilarityFunction function) { - throw new IllegalArgumentException("cannot create a byte vector field from float"); - } - @Override IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldType, FieldDataContext fieldDataContext) { return new VectorIndexFieldData.Builder( @@ -615,10 +603,6 @@ ByteBuffer createByteBuffer(IndexVersion indexVersion, int numBytes) { public abstract void readAndWriteValue(ByteBuffer byteBuffer, XContentBuilder b) throws IOException; - abstract KnnFloatVectorField createKnnVectorField(String name, float[] vector, VectorSimilarityFunction function); - - abstract KnnByteVectorField createKnnVectorField(String name, byte[] vector, VectorSimilarityFunction function); - abstract IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldType, FieldDataContext fieldDataContext); abstract void parseKnnVectorAndIndex(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException; @@ -1175,31 +1159,7 @@ public Query createKnnQuery( } public Query createExactKnnQuery(float[] queryVector) { - if (isIndexed() == false) { - throw new IllegalArgumentException( - "to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]" - ); - } - if (queryVector.length != dims) { - throw new IllegalArgumentException( - "the query vector has a different dimension [" + queryVector.length + "] than the index vectors [" + dims + "]" - ); - } - elementType.checkVectorBounds(queryVector); - if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { - float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); - elementType.checkVectorMagnitude(similarity, 
ElementType.errorFloatElementsAppender(queryVector), squaredMagnitude); - if (similarity == VectorSimilarity.COSINE - && ElementType.FLOAT.equals(elementType) - && indexVersionCreated.onOrAfter(NORMALIZE_COSINE) - && isNotUnitVector(squaredMagnitude)) { - float length = (float) Math.sqrt(squaredMagnitude); - queryVector = Arrays.copyOf(queryVector, queryVector.length); - for (int i = 0; i < queryVector.length; i++) { - queryVector[i] /= length; - } - } - } + queryVector = validateAndNormalize(queryVector); VectorSimilarityFunction vectorSimilarityFunction = similarity.vectorSimilarityFunction(indexVersionCreated, elementType); return switch (elementType) { case BYTE -> { @@ -1242,12 +1202,38 @@ public Query createKnnQuery( Float similarityThreshold, BitSetProducer parentFilter ) { + queryVector = validateAndNormalize(queryVector); + Query knnQuery = switch (elementType) { + case BYTE -> { + byte[] bytes = new byte[queryVector.length]; + for (int i = 0; i < queryVector.length; i++) { + bytes[i] = (byte) queryVector[i]; + } + yield parentFilter != null + ? new ESDiversifyingChildrenByteKnnVectorQuery(name(), bytes, filter, numCands, parentFilter) + : new ESKnnByteVectorQuery(name(), bytes, numCands, filter); + } + case FLOAT -> parentFilter != null + ? 
new ESDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) + : new ESKnnFloatVectorQuery(name(), queryVector, numCands, filter); + }; + + if (similarityThreshold != null) { + knnQuery = new VectorSimilarityQuery( + knnQuery, + similarityThreshold, + similarity.score(similarityThreshold, elementType, dims) + ); + } + return knnQuery; + } + + private float[] validateAndNormalize(float[] queryVector) { if (isIndexed() == false) { throw new IllegalArgumentException( "to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]" ); } - if (queryVector.length != dims) { throw new IllegalArgumentException( "the query vector has a different dimension [" + queryVector.length + "] than the index vectors [" + dims + "]" @@ -1268,29 +1254,7 @@ && isNotUnitVector(squaredMagnitude)) { } } } - Query knnQuery = switch (elementType) { - case BYTE -> { - byte[] bytes = new byte[queryVector.length]; - for (int i = 0; i < queryVector.length; i++) { - bytes[i] = (byte) queryVector[i]; - } - yield parentFilter != null - ? new ESDiversifyingChildrenByteKnnVectorQuery(name(), bytes, filter, numCands, parentFilter) - : new ESKnnByteVectorQuery(name(), bytes, numCands, filter); - } - case FLOAT -> parentFilter != null - ? 
new ESDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) - : new ESKnnFloatVectorQuery(name(), queryVector, numCands, filter); - }; - - if (similarityThreshold != null) { - knnQuery = new VectorSimilarityQuery( - knnQuery, - similarityThreshold, - similarity.score(similarityThreshold, elementType, dims) - ); - } - return knnQuery; + return queryVector; } VectorSimilarity getSimilarity() { From e55bdba027689dc34118f2013a2d2376ca3a768d Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 4 Mar 2024 13:22:41 +0100 Subject: [PATCH 095/107] Update docs/changelog/105578.yaml --- docs/changelog/105578.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/changelog/105578.yaml b/docs/changelog/105578.yaml index d7420096cb178..1ffa0128c1d0a 100644 --- a/docs/changelog/105578.yaml +++ b/docs/changelog/105578.yaml @@ -1,13 +1,13 @@ pr: 105578 summary: Upgrade to Lucene 9.10.0 area: Search -type: feature -issues: - - 104556 +type: enhancement +issues: [] highlight: - title: "New Lucene 9.10 release" + title: New Lucene 9.10 release body: |- - https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. - https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search - https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. - https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. 
+ notable: true From b96f8f0ce6a6b5b2b652c981c106a632c0f659a6 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Mar 2024 13:37:36 +0100 Subject: [PATCH 096/107] Dry up some painless code (#105885) Just some Intellij automatic fixes I found while researching other things, no need to have this duplication. --- .../painless/ContextDocGenerator.java | 126 ++++-------------- .../painless/ContextGeneratorCommon.java | 32 +++-- .../org/elasticsearch/painless/api/CIDR.java | 45 +------ .../common/network/CIDRUtils.java | 4 +- 4 files changed, 52 insertions(+), 155 deletions(-) diff --git a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java index af702bd2e2fe3..d99a085d784b5 100644 --- a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java +++ b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java @@ -27,6 +27,7 @@ import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -176,7 +177,7 @@ private static void printSharedIndexPage( PrintStream sharedIndexStream = new PrintStream( Files.newOutputStream(sharedIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, - StandardCharsets.UTF_8.name() + StandardCharsets.UTF_8 ) ) { @@ -205,7 +206,7 @@ private static void printContextIndexPage( PrintStream contextIndexStream = new PrintStream( Files.newOutputStream(contextIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, - StandardCharsets.UTF_8.name() + StandardCharsets.UTF_8 ) ) { @@ -306,7 +307,7 @@ private static void printSharedPackagesPages( PrintStream sharedPackagesStream = new PrintStream( Files.newOutputStream(sharedClassesPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), 
false, - StandardCharsets.UTF_8.name() + StandardCharsets.UTF_8 ) ) { @@ -329,7 +330,7 @@ private static void printContextPackagesPages( PrintStream contextPackagesStream = new PrintStream( Files.newOutputStream(contextPackagesPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, - StandardCharsets.UTF_8.name() + StandardCharsets.UTF_8 ) ) { @@ -413,7 +414,7 @@ private static void printRootIndexPage(Path rootDir, List c PrintStream rootIndexStream = new PrintStream( Files.newOutputStream(rootIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, - StandardCharsets.UTF_8.name() + StandardCharsets.UTF_8 ) ) { @@ -598,18 +599,7 @@ private static String getConstructorJavaDocLink(PainlessContextConstructorInfo c javaDocLink.append(constructorInfo.getDeclaring().replace('.', '/')); javaDocLink.append(".html#("); - for (int parameterIndex = 0; parameterIndex < constructorInfo.getParameters().size(); ++parameterIndex) { - - javaDocLink.append(getLinkType(constructorInfo.getParameters().get(parameterIndex))); - - if (parameterIndex + 1 < constructorInfo.getParameters().size()) { - javaDocLink.append(","); - } - } - - javaDocLink.append(")"); - - return javaDocLink.toString(); + return collectParameters(javaDocLink, constructorInfo.getParameters()); } private static String getMethodJavaDocLink(PainlessContextMethodInfo methodInfo) { @@ -621,11 +611,15 @@ private static String getMethodJavaDocLink(PainlessContextMethodInfo methodInfo) javaDocLink.append(methodInfo.getName()); javaDocLink.append("("); - for (int parameterIndex = 0; parameterIndex < methodInfo.getParameters().size(); ++parameterIndex) { + return collectParameters(javaDocLink, methodInfo.getParameters()); + } - javaDocLink.append(getLinkType(methodInfo.getParameters().get(parameterIndex))); + private static String collectParameters(StringBuilder javaDocLink, List parameters) { + for (int parameterIndex = 0; parameterIndex < parameters.size(); ++parameterIndex) { - if 
(parameterIndex + 1 < methodInfo.getParameters().size()) { + javaDocLink.append(getLinkType(parameters.get(parameterIndex))); + + if (parameterIndex + 1 < parameters.size()) { javaDocLink.append(","); } } @@ -708,32 +702,19 @@ private static List sortStaticInfos(Set staticExcludes, List(staticInfos); staticInfos.removeIf(staticExcludes::contains); - staticInfos.sort((si1, si2) -> { - String sv1; - String sv2; - - if (si1 instanceof PainlessContextMethodInfo) { - sv1 = ((PainlessContextMethodInfo) si1).getSortValue(); - } else if (si1 instanceof PainlessContextClassBindingInfo) { - sv1 = ((PainlessContextClassBindingInfo) si1).getSortValue(); - } else if (si1 instanceof PainlessContextInstanceBindingInfo) { - sv1 = ((PainlessContextInstanceBindingInfo) si1).getSortValue(); - } else { - throw new IllegalArgumentException("unexpected static info type"); - } - - if (si2 instanceof PainlessContextMethodInfo) { - sv2 = ((PainlessContextMethodInfo) si2).getSortValue(); - } else if (si2 instanceof PainlessContextClassBindingInfo) { - sv2 = ((PainlessContextClassBindingInfo) si2).getSortValue(); - } else if (si2 instanceof PainlessContextInstanceBindingInfo) { - sv2 = ((PainlessContextInstanceBindingInfo) si2).getSortValue(); + staticInfos.sort(Comparator.comparing(si -> { + String sv; + if (si instanceof PainlessContextMethodInfo) { + sv = ((PainlessContextMethodInfo) si).getSortValue(); + } else if (si instanceof PainlessContextClassBindingInfo) { + sv = ((PainlessContextClassBindingInfo) si).getSortValue(); + } else if (si instanceof PainlessContextInstanceBindingInfo) { + sv = ((PainlessContextInstanceBindingInfo) si).getSortValue(); } else { throw new IllegalArgumentException("unexpected static info type"); } - - return sv1.compareTo(sv2); - }); + return sv; + })); return staticInfos; } @@ -742,48 +723,9 @@ private static List sortClassInfos( Set classExcludes, List classInfos ) { - classInfos = new ArrayList<>(classInfos); - classInfos.removeIf( - v -> 
"void".equals(v.getName()) - || "boolean".equals(v.getName()) - || "byte".equals(v.getName()) - || "short".equals(v.getName()) - || "char".equals(v.getName()) - || "int".equals(v.getName()) - || "long".equals(v.getName()) - || "float".equals(v.getName()) - || "double".equals(v.getName()) - || "org.elasticsearch.painless.lookup.def".equals(v.getName()) - || isInternalClass(v.getName()) - || classExcludes.contains(v) - ); - - classInfos.sort((c1, c2) -> { - String n1 = c1.getName(); - String n2 = c2.getName(); - boolean i1 = c1.isImported(); - boolean i2 = c2.isImported(); - - String p1 = n1.substring(0, n1.lastIndexOf('.')); - String p2 = n2.substring(0, n2.lastIndexOf('.')); - - int compare = p1.compareTo(p2); - - if (compare == 0) { - if (i1 && i2) { - compare = n1.substring(n1.lastIndexOf('.') + 1).compareTo(n2.substring(n2.lastIndexOf('.') + 1)); - } else if (i1 == false && i2 == false) { - compare = n1.compareTo(n2); - } else { - compare = Boolean.compare(i1, i2) * -1; - } - } - - return compare; - }); - - return classInfos; + classInfos.removeIf(v -> ContextGeneratorCommon.isExcludedClassInfo(v) || classExcludes.contains(v)); + return ContextGeneratorCommon.sortFilteredClassInfos(classInfos); } private static Map getDisplayNames(List classInfos) { @@ -802,19 +744,5 @@ private static Map getDisplayNames(List getDisplayNames(Collection sortClassInfos(Collection unsortedClassInfos) { - List classInfos = new ArrayList<>(unsortedClassInfos); - classInfos.removeIf( - v -> "void".equals(v.getName()) - || "boolean".equals(v.getName()) - || "byte".equals(v.getName()) - || "short".equals(v.getName()) - || "char".equals(v.getName()) - || "int".equals(v.getName()) - || "long".equals(v.getName()) - || "float".equals(v.getName()) - || "double".equals(v.getName()) - || "org.elasticsearch.painless.lookup.def".equals(v.getName()) - || isInternalClass(v.getName()) - ); + classInfos.removeIf(ContextGeneratorCommon::isExcludedClassInfo); + return 
sortFilteredClassInfos(classInfos); + } + + static boolean isExcludedClassInfo(PainlessContextClassInfo v) { + return "void".equals(v.getName()) + || "boolean".equals(v.getName()) + || "byte".equals(v.getName()) + || "short".equals(v.getName()) + || "char".equals(v.getName()) + || "int".equals(v.getName()) + || "long".equals(v.getName()) + || "float".equals(v.getName()) + || "double".equals(v.getName()) + || "org.elasticsearch.painless.lookup.def".equals(v.getName()) + || isInternalClass(v.getName()); + } + static List sortFilteredClassInfos(List classInfos) { classInfos.sort((c1, c2) -> { String n1 = c1.getName(); String n2 = c2.getName(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/CIDR.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/CIDR.java index 8ce32e182cb18..c3e39b5905cdc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/CIDR.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/CIDR.java @@ -8,10 +8,10 @@ package org.elasticsearch.painless.api; +import org.elasticsearch.common.network.CIDRUtils; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.core.Tuple; -import java.net.InetAddress; import java.util.Arrays; /** @@ -28,7 +28,7 @@ public class CIDR { */ public CIDR(String cidr) { if (cidr.contains("/")) { - final Tuple range = getLowerUpper(InetAddresses.parseCidr(cidr)); + final Tuple range = CIDRUtils.getLowerUpper(InetAddresses.parseCidr(cidr)); lower = range.v1(); upper = range.v2(); } else { @@ -51,48 +51,13 @@ public boolean contains(String addressToCheck) { return isBetween(parsedAddress, lower, upper); } - private static Tuple getLowerUpper(Tuple cidr) { - final InetAddress value = cidr.v1(); - final Integer prefixLength = cidr.v2(); - - if (prefixLength < 0 || prefixLength > 8 * value.getAddress().length) { - throw new IllegalArgumentException( - "illegal prefixLength '" + prefixLength + "'. 
Must be 0-32 for IPv4 ranges, 0-128 for IPv6 ranges" - ); - } - - byte[] lower = value.getAddress(); - byte[] upper = value.getAddress(); - // Borrowed from Lucene - for (int i = prefixLength; i < 8 * lower.length; i++) { - int m = 1 << (7 - (i & 7)); - lower[i >> 3] &= (byte) ~m; - upper[i >> 3] |= (byte) m; - } - return new Tuple<>(lower, upper); - } - private static boolean isBetween(byte[] addr, byte[] lower, byte[] upper) { if (addr.length != lower.length) { - addr = encode(addr); - lower = encode(lower); - upper = encode(upper); + addr = CIDRUtils.encode(addr); + lower = CIDRUtils.encode(lower); + upper = CIDRUtils.encode(upper); } return Arrays.compareUnsigned(lower, addr) <= 0 && Arrays.compareUnsigned(upper, addr) >= 0; } - // Borrowed from Lucene to make this consistent IP fields matching for the mix of IPv4 and IPv6 values - // Modified signature to avoid extra conversions - private static byte[] encode(byte[] address) { - final byte[] IPV4_PREFIX = new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1 }; - if (address.length == 4) { - byte[] mapped = new byte[16]; - System.arraycopy(IPV4_PREFIX, 0, mapped, 0, IPV4_PREFIX.length); - System.arraycopy(address, 0, mapped, IPV4_PREFIX.length, address.length); - address = mapped; - } else if (address.length != 16) { - throw new UnsupportedOperationException("Only IPv4 and IPv6 addresses are supported"); - } - return address; - } } diff --git a/server/src/main/java/org/elasticsearch/common/network/CIDRUtils.java b/server/src/main/java/org/elasticsearch/common/network/CIDRUtils.java index ea4d6da9b7bec..3b5a9ae1589f8 100644 --- a/server/src/main/java/org/elasticsearch/common/network/CIDRUtils.java +++ b/server/src/main/java/org/elasticsearch/common/network/CIDRUtils.java @@ -48,7 +48,7 @@ public static boolean isInRange(byte[] addr, String cidrAddress) { return isBetween(addr, lower, upper); } - private static Tuple getLowerUpper(Tuple cidr) { + public static Tuple getLowerUpper(Tuple cidr) { final InetAddress 
value = cidr.v1(); final Integer prefixLength = cidr.v2(); @@ -81,7 +81,7 @@ private static boolean isBetween(byte[] addr, byte[] lower, byte[] upper) { // Borrowed from Lucene to make this consistent IP fields matching for the mix of IPv4 and IPv6 values // Modified signature to avoid extra conversions - private static byte[] encode(byte[] address) { + public static byte[] encode(byte[] address) { if (address.length == 4) { byte[] mapped = new byte[16]; System.arraycopy(IPV4_PREFIX, 0, mapped, 0, IPV4_PREFIX.length); From 9c1a0797d18795f449461433911f2a9558a9ec7b Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Mon, 4 Mar 2024 13:59:21 +0100 Subject: [PATCH 097/107] Make Health API more resilient to multi-version clusters (#105789) First check whether the full cluster supports a specific indicator (feature) before we mark an indicator as "unknown" when (meta) data is missing from the cluster state. --- docs/changelog/105789.yaml | 6 +++ .../rest-api-spec/test/health/10_basic.yml | 6 +-- .../elasticsearch/health/HealthFeatures.java | 10 +++- .../node/DiskHealthIndicatorService.java | 17 ++++++- .../ShardsCapacityHealthIndicatorService.java | 15 +++++- .../elasticsearch/node/NodeConstruction.java | 6 +-- ...sitoryIntegrityHealthIndicatorService.java | 23 +++++++-- .../node/DiskHealthIndicatorServiceTests.java | 46 ++++++++++++----- ...dsCapacityHealthIndicatorServiceTests.java | 51 +++++++++++++++---- ...yIntegrityHealthIndicatorServiceTests.java | 19 +++++-- 10 files changed, 157 insertions(+), 42 deletions(-) create mode 100644 docs/changelog/105789.yaml diff --git a/docs/changelog/105789.yaml b/docs/changelog/105789.yaml new file mode 100644 index 0000000000000..02a6936fa3294 --- /dev/null +++ b/docs/changelog/105789.yaml @@ -0,0 +1,6 @@ +pr: 105789 +summary: Make Health API more resilient to multi-version clusters +area: Health +type: bug +issues: + - 90183 diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml index 5e6ca8247997c..1dc35c165b4e0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml @@ -1,10 +1,8 @@ --- "cluster health basic test": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/90183" - # version: "- 8.3.99" - # reason: "health was only added in 8.2.0, and master_is_stable in 8.4.0" + version: "- 8.3.99" + reason: "health was only added in 8.2.0, and master_is_stable in 8.4.0" - do: health_report: { } diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java index 3a5d11f862efc..4b3bcf7e7278f 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java @@ -13,13 +13,21 @@ import org.elasticsearch.features.NodeFeature; import java.util.Map; +import java.util.Set; public class HealthFeatures implements FeatureSpecification { public static final NodeFeature SUPPORTS_HEALTH = new NodeFeature("health.supports_health"); + public static final NodeFeature SUPPORTS_SHARDS_CAPACITY_INDICATOR = new NodeFeature("health.shards_capacity_indicator"); + public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator"); + + @Override + public Set getFeatures() { + return Set.of(SUPPORTS_EXTENDED_REPOSITORY_INDICATOR); + } @Override public Map getHistoricalFeatures() { - return Map.of(SUPPORTS_HEALTH, Version.V_8_5_0); + return Map.of(SUPPORTS_HEALTH, Version.V_8_5_0, SUPPORTS_SHARDS_CAPACITY_INDICATOR, Version.V_8_8_0); } } diff --git 
a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java index 2805aa88a7e54..3304b71b4ca31 100644 --- a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java @@ -17,7 +17,9 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; +import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -71,9 +73,11 @@ public class DiskHealthIndicatorService implements HealthIndicatorService { private static final String IMPACT_CLUSTER_FUNCTIONALITY_UNAVAILABLE_ID = "cluster_functionality_unavailable"; private final ClusterService clusterService; + private final FeatureService featureService; - public DiskHealthIndicatorService(ClusterService clusterService) { + public DiskHealthIndicatorService(ClusterService clusterService, FeatureService featureService) { this.clusterService = clusterService; + this.featureService = featureService; } @Override @@ -83,8 +87,18 @@ public String name() { @Override public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResourcesCount, HealthInfo healthInfo) { + ClusterState clusterState = clusterService.state(); Map diskHealthInfoMap = healthInfo.diskInfoByNode(); if (diskHealthInfoMap == null || diskHealthInfoMap.isEmpty()) { + if (featureService.clusterHasFeature(clusterState, HealthFeatures.SUPPORTS_HEALTH) == false) { + return createIndicator( + HealthStatus.GREEN, + "No disk usage data available. 
The cluster currently has mixed versions (an upgrade may be in progress).", + HealthIndicatorDetails.EMPTY, + List.of(), + List.of() + ); + } /* * If there is no disk health info, that either means that a new health node was just elected, or something is seriously * wrong with health data collection on the health node. Either way, we immediately return UNKNOWN. If there are at least @@ -98,7 +112,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources Collections.emptyList() ); } - ClusterState clusterState = clusterService.state(); logNodesMissingHealthInfo(diskHealthInfoMap, clusterState); DiskHealthAnalyzer diskHealthAnalyzer = new DiskHealthAnalyzer(diskHealthInfoMap, clusterState); diff --git a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java index 1852e504b61db..16e18b69d5c1d 100644 --- a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java @@ -12,7 +12,9 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; +import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -90,9 +92,11 @@ public class ShardsCapacityHealthIndicatorService implements HealthIndicatorServ ); private final ClusterService clusterService; + private final FeatureService featureService; - public ShardsCapacityHealthIndicatorService(ClusterService clusterService) { + public ShardsCapacityHealthIndicatorService(ClusterService clusterService, 
FeatureService featureService) { this.clusterService = clusterService; + this.featureService = featureService; } @Override @@ -105,6 +109,15 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources var state = clusterService.state(); var healthMetadata = HealthMetadata.getFromClusterState(state); if (healthMetadata == null || healthMetadata.getShardLimitsMetadata() == null) { + if (featureService.clusterHasFeature(state, HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR) == false) { + return createIndicator( + HealthStatus.GREEN, + "No shard limits configured yet. The cluster currently has mixed versions (an upgrade may be in progress).", + HealthIndicatorDetails.EMPTY, + List.of(), + List.of() + ); + } return unknownIndicator(); } diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 9323ec63c0d2d..19a6d200189f2 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -1196,9 +1196,9 @@ private Module loadDiagnosticServices( var serverHealthIndicatorServices = Stream.of( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), - new RepositoryIntegrityHealthIndicatorService(clusterService), - new DiskHealthIndicatorService(clusterService), - new ShardsCapacityHealthIndicatorService(clusterService) + new RepositoryIntegrityHealthIndicatorService(clusterService, featureService), + new DiskHealthIndicatorService(clusterService, featureService), + new ShardsCapacityHealthIndicatorService(clusterService, featureService) ); var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java 
b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java index 0b460b5cb2fb7..67afddcb70664 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java @@ -12,7 +12,9 @@ import org.elasticsearch.cluster.metadata.RepositoriesMetadata; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; +import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -59,6 +61,8 @@ public class RepositoryIntegrityHealthIndicatorService implements HealthIndicato public static final String NO_REPOS_CONFIGURED = "No snapshot repositories configured."; public static final String ALL_REPOS_HEALTHY = "All repositories are healthy."; public static final String NO_REPO_HEALTH_INFO = "No repository health info."; + public static final String MIXED_VERSIONS = + "No repository health info. 
The cluster currently has mixed versions (an upgrade may be in progress)."; public static final List IMPACTS = List.of( new HealthIndicatorImpact( @@ -95,9 +99,11 @@ public class RepositoryIntegrityHealthIndicatorService implements HealthIndicato ); private final ClusterService clusterService; + private final FeatureService featureService; - public RepositoryIntegrityHealthIndicatorService(ClusterService clusterService) { + public RepositoryIntegrityHealthIndicatorService(ClusterService clusterService, FeatureService featureService) { this.clusterService = clusterService; + this.featureService = featureService; } @Override @@ -128,7 +134,7 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources /** * Analyzer for the cluster's repositories health; aids in constructing a {@link HealthIndicatorResult}. */ - static class RepositoryHealthAnalyzer { + class RepositoryHealthAnalyzer { private final ClusterState clusterState; private final int totalRepositories; private final List corruptedRepositories; @@ -137,6 +143,7 @@ static class RepositoryHealthAnalyzer { private final Set invalidRepositories = new HashSet<>(); private final Set nodesWithInvalidRepos = new HashSet<>(); private final HealthStatus healthStatus; + private boolean clusterHasFeature = true; private RepositoryHealthAnalyzer( ClusterState clusterState, @@ -167,7 +174,15 @@ private RepositoryHealthAnalyzer( || invalidRepositories.isEmpty() == false) { healthStatus = YELLOW; } else if (repositoriesHealthByNode.isEmpty()) { - healthStatus = UNKNOWN; + clusterHasFeature = featureService.clusterHasFeature( + clusterState, + HealthFeatures.SUPPORTS_EXTENDED_REPOSITORY_INDICATOR + ) == false; + if (clusterHasFeature) { + healthStatus = GREEN; + } else { + healthStatus = UNKNOWN; + } } else { healthStatus = GREEN; } @@ -179,7 +194,7 @@ public HealthStatus getHealthStatus() { public String getSymptom() { if (healthStatus == GREEN) { - return ALL_REPOS_HEALTHY; + return clusterHasFeature 
? ALL_REPOS_HEALTHY : MIXED_VERSIONS; } else if (healthStatus == UNKNOWN) { return NO_REPO_HEALTH_INFO; } diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index a622c1ff600d6..0d38aaf5b3e4a 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -26,7 +26,9 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; +import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.HealthStatus; @@ -39,6 +41,8 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; +import org.mockito.Mockito; import java.io.IOException; import java.util.Collection; @@ -66,6 +70,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.startsWith; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -98,10 +103,20 @@ public class DiskHealthIndicatorServiceTests extends ESTestCase { DiscoveryNodeRole.TRANSFORM_ROLE ); + private FeatureService featureService; + + @Before + public void setUp() throws Exception { + super.setUp(); + + featureService = Mockito.mock(FeatureService.class); + Mockito.when(featureService.clusterHasFeature(any(), any())).thenReturn(true); + } + public void testServiceBasics() { Set 
discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); { HealthStatus expectedStatus = HealthStatus.UNKNOWN; HealthInfo healthInfo = HealthInfo.EMPTY_HEALTH_INFO; @@ -125,7 +140,7 @@ public void testServiceBasics() { public void testIndicatorYieldsGreenWhenNodeHasUnknownStatus() { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthStatus expectedStatus = HealthStatus.GREEN; HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(HealthStatus.UNKNOWN, discoveryNodes); @@ -136,7 +151,7 @@ public void testIndicatorYieldsGreenWhenNodeHasUnknownStatus() { public void testGreen() throws IOException { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthStatus expectedStatus = HealthStatus.GREEN; HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(expectedStatus, discoveryNodes); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); @@ -171,7 +186,7 @@ public void testYellowMixedNodes() throws IOException { final var clusterService = createClusterService(Set.of(), allNodes, indexNameToNodeIdsMap); HealthStatus expectedStatus = 
HealthStatus.YELLOW; HealthInfo healthInfo = createHealthInfo(new HealthInfoConfig(expectedStatus, allNodes.size(), allNodes)); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(expectedStatus)); assertThat(result.symptom(), containsString("with roles: [data")); @@ -249,7 +264,7 @@ public void testRedNoBlockedIndicesAndRedAllRoleNodes() throws IOException { indexNameToNodeIdsMap.put(indexName, new HashSet<>(randomNonEmptySubsetOf(affectedNodeIds))); } ClusterService clusterService = createClusterService(Set.of(), discoveryNodes, indexNameToNodeIdsMap); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); Map diskInfoByNode = new HashMap<>(); for (DiscoveryNode discoveryNode : discoveryNodes) { if (affectedNodeIds.contains(discoveryNode.getId())) { @@ -313,7 +328,7 @@ public void testRedNoBlockedIndicesAndRedAllRoleNodes() throws IOException { public void testRedWithBlockedIndicesAndGreenNodes() throws IOException { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthStatus expectedStatus = HealthStatus.RED; HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(HealthStatus.GREEN, discoveryNodes); @@ -358,7 +373,7 @@ public void testRedWithBlockedIndicesAndGreenNodes() throws IOException { 
public void testRedWithBlockedIndicesAndYellowNodes() throws IOException { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthStatus expectedStatus = HealthStatus.RED; int numberOfYellowNodes = randomIntBetween(1, discoveryNodes.size()); HealthInfo healthInfo = createHealthInfo(new HealthInfoConfig(HealthStatus.YELLOW, numberOfYellowNodes, discoveryNodes)); @@ -437,7 +452,7 @@ public void testRedBlockedIndicesAndRedAllRolesNodes() throws IOException { } } ClusterService clusterService = createClusterService(blockedIndices, discoveryNodes, indexNameToNodeIdsMap); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(expectedStatus)); assertThat( @@ -476,7 +491,7 @@ public void testRedNodesWithoutAnyBlockedIndices() throws IOException { indexNameToNodeIdsMap.put(indexName, nonRedNodeIds); } ClusterService clusterService = createClusterService(Set.of(), discoveryNodes, indexNameToNodeIdsMap); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(expectedStatus)); assertThat(result.impacts().size(), equalTo(3)); @@ -512,7 +527,7 @@ public void testMissingHealthInfo() { Set 
discoveryNodesInClusterState = new HashSet<>(discoveryNodes); discoveryNodesInClusterState.add(DiscoveryNodeUtils.create(randomAlphaOfLength(30), UUID.randomUUID().toString())); ClusterService clusterService = createClusterService(discoveryNodesInClusterState, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); { HealthInfo healthInfo = HealthInfo.EMPTY_HEALTH_INFO; HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); @@ -544,7 +559,7 @@ public void testUnhealthyMasterNodes() { Set roles = Set.of(DiscoveryNodeRole.MASTER_ROLE, otherRole); Set discoveryNodes = createNodes(roles); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthStatus expectedStatus = randomFrom(HealthStatus.RED, HealthStatus.YELLOW); int numberOfProblemNodes = randomIntBetween(1, discoveryNodes.size()); HealthInfo healthInfo = createHealthInfo(new HealthInfoConfig(expectedStatus, numberOfProblemNodes, discoveryNodes)); @@ -599,7 +614,7 @@ public void testUnhealthyNonDataNonMasterNodes() { Set roles = new HashSet<>(randomNonEmptySubsetOf(OTHER_ROLES)); Set nodes = createNodes(roles); ClusterService clusterService = createClusterService(nodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); HealthStatus expectedStatus = randomFrom(HealthStatus.RED, HealthStatus.YELLOW); int numberOfProblemNodes = randomIntBetween(1, nodes.size()); HealthInfo 
healthInfo = createHealthInfo(new HealthInfoConfig(expectedStatus, numberOfProblemNodes, nodes)); @@ -655,7 +670,7 @@ public void testBlockedIndexWithRedNonDataNodesAndYellowDataNodes() { Set masterNodes = createNodes(masterRole); Set otherNodes = createNodes(otherRoles); ClusterService clusterService = createClusterService(Sets.union(Sets.union(dataNodes, masterNodes), otherNodes), true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); int numberOfRedMasterNodes = randomIntBetween(1, masterNodes.size()); int numberOfRedOtherNodes = randomIntBetween(1, otherNodes.size()); int numberOfYellowDataNodes = randomIntBetween(1, dataNodes.size()); @@ -877,7 +892,7 @@ public void testLimitNumberOfAffectedResources() { Set masterNodes = createNodes(20, masterRole); Set otherNodes = createNodes(10, otherRoles); ClusterService clusterService = createClusterService(Sets.union(Sets.union(dataNodes, masterNodes), otherNodes), true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); int numberOfRedMasterNodes = masterNodes.size(); int numberOfRedOtherNodes = otherNodes.size(); int numberOfYellowDataNodes = dataNodes.size(); @@ -1055,9 +1070,11 @@ static ClusterState createClusterState( Collection nodes, Map> indexNameToNodeIdsMap ) { + Map> features = new HashMap<>(); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(); for (DiscoveryNode node : nodes) { nodesBuilder = nodesBuilder.add(node); + features.put(node.getId(), Set.of(HealthFeatures.SUPPORTS_HEALTH.id())); } nodesBuilder.localNodeId(randomFrom(nodes).getId()); nodesBuilder.masterNodeId(randomFrom(nodes).getId()); @@ -1093,6 +1110,7 @@ static ClusterState 
createClusterState( state.metadata(metadata.generateClusterUuidIfNeeded().build()); state.routingTable(routingTable.build()); state.blocks(clusterBlocksBuilder); + state.nodeFeatures(features); return state.build(); } diff --git a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java index f6e856079012d..c57f19999a915 100644 --- a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.index.IndexVersion; @@ -36,6 +38,7 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; +import org.mockito.Mockito; import java.io.IOException; import java.util.List; @@ -60,6 +63,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; public class ShardsCapacityHealthIndicatorServiceTests extends ESTestCase { @@ -68,6 +72,7 @@ public class ShardsCapacityHealthIndicatorServiceTests extends ESTestCase { private static ThreadPool threadPool; private ClusterService clusterService; + private FeatureService featureService; private DiscoveryNode dataNode; private DiscoveryNode frozenNode; @@ -86,6 +91,9 @@ public void setUp() throws Exception { .build(); clusterService = ClusterServiceUtils.createClusterService(threadPool); + + 
featureService = Mockito.mock(FeatureService.class); + Mockito.when(featureService.clusterHasFeature(any(), any())).thenReturn(true); } @After @@ -113,7 +121,7 @@ public void testNoShardsCapacityMetadata() throws IOException { createIndexInDataNode(100) ) ); - var target = new ShardsCapacityHealthIndicatorService(clusterService); + var target = new ShardsCapacityHealthIndicatorService(clusterService, featureService); var indicatorResult = target.calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), HealthStatus.UNKNOWN); @@ -127,7 +135,10 @@ public void testIndicatorYieldsGreenInCaseThereIsRoom() throws IOException { int maxShardsPerNode = randomValidMaxShards(); int maxShardsPerNodeFrozen = randomValidMaxShards(); var clusterService = createClusterService(maxShardsPerNode, maxShardsPerNodeFrozen, createIndexInDataNode(maxShardsPerNode / 4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + true, + HealthInfo.EMPTY_HEALTH_INFO + ); assertEquals(indicatorResult.status(), HealthStatus.GREEN); assertTrue(indicatorResult.impacts().isEmpty()); @@ -151,7 +162,10 @@ public void testIndicatorYieldsYellowInCaseThereIsNotEnoughRoom() throws IOExcep // Only data_nodes does not have enough space int maxShardsPerNodeFrozen = randomValidMaxShards(); var clusterService = createClusterService(25, maxShardsPerNodeFrozen, createIndexInDataNode(4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + true, + HealthInfo.EMPTY_HEALTH_INFO + ); assertEquals(indicatorResult.status(), YELLOW); assertEquals(indicatorResult.symptom(), "Cluster is close to reaching the configured maximum 
number of shards for data nodes."); @@ -174,7 +188,10 @@ public void testIndicatorYieldsYellowInCaseThereIsNotEnoughRoom() throws IOExcep // Only frozen_nodes does not have enough space int maxShardsPerNode = randomValidMaxShards(); var clusterService = createClusterService(maxShardsPerNode, 25, createIndexInFrozenNode(4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + true, + HealthInfo.EMPTY_HEALTH_INFO + ); assertEquals(indicatorResult.status(), YELLOW); assertEquals( @@ -199,7 +216,10 @@ public void testIndicatorYieldsYellowInCaseThereIsNotEnoughRoom() throws IOExcep { // Both data and frozen nodes does not have enough space var clusterService = createClusterService(25, 25, createIndexInDataNode(4), createIndexInFrozenNode(4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + true, + HealthInfo.EMPTY_HEALTH_INFO + ); assertEquals(indicatorResult.status(), YELLOW); assertEquals( @@ -230,7 +250,10 @@ public void testIndicatorYieldsRedInCaseThereIsNotEnoughRoom() throws IOExceptio // Only data_nodes does not have enough space int maxShardsPerNodeFrozen = randomValidMaxShards(); var clusterService = createClusterService(25, maxShardsPerNodeFrozen, createIndexInDataNode(11)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + true, + HealthInfo.EMPTY_HEALTH_INFO + ); assertEquals(indicatorResult.status(), RED); assertEquals(indicatorResult.symptom(), "Cluster is close to reaching the configured maximum number 
of shards for data nodes."); @@ -253,7 +276,10 @@ public void testIndicatorYieldsRedInCaseThereIsNotEnoughRoom() throws IOExceptio // Only frozen_nodes does not have enough space int maxShardsPerNode = randomValidMaxShards(); var clusterService = createClusterService(maxShardsPerNode, 25, createIndexInFrozenNode(11)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + true, + HealthInfo.EMPTY_HEALTH_INFO + ); assertEquals(indicatorResult.status(), RED); assertEquals( @@ -278,7 +304,10 @@ public void testIndicatorYieldsRedInCaseThereIsNotEnoughRoom() throws IOExceptio { // Both data and frozen nodes does not have enough space var clusterService = createClusterService(25, 25, createIndexInDataNode(11), createIndexInFrozenNode(11)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + true, + HealthInfo.EMPTY_HEALTH_INFO + ); assertEquals(indicatorResult.status(), RED); assertEquals( @@ -397,7 +426,11 @@ private ClusterState createClusterState( metadata.put(idxMetadata); } - return ClusterState.builder(clusterState).metadata(metadata).build(); + var features = Set.of(HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR.id()); + return ClusterState.builder(clusterState) + .metadata(metadata) + .nodeFeatures(Map.of(dataNode.getId(), features, frozenNode.getId(), features)) + .build(); } private static IndexMetadata.Builder createIndexInDataNode(int shards) { diff --git a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java index 0dfe27ee6dc50..572375d64d8b8 
100644 --- a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java @@ -18,8 +18,10 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.Diagnosis.Resource.Type; +import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.SimpleHealthIndicatorDetails; @@ -27,12 +29,14 @@ import org.elasticsearch.health.node.RepositoriesHealthInfo; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import org.mockito.Mockito; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Stream; import static org.elasticsearch.cluster.node.DiscoveryNode.DISCOVERY_NODE_COMPARATOR; @@ -47,6 +51,7 @@ import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.NAME; import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.UNKNOWN_DEFINITION; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -55,6 +60,7 @@ public class RepositoryIntegrityHealthIndicatorServiceTests extends ESTestCase { private DiscoveryNode node1; private DiscoveryNode node2; private HealthInfo healthInfo; + private FeatureService featureService; @Before public void setUp() throws Exception { @@ -74,6 +80,9 @@ public void setUp() throws Exception { ) ) ); + + featureService = 
Mockito.mock(FeatureService.class); + Mockito.when(featureService.clusterHasFeature(any(), any())).thenReturn(true); } public void testIsGreenWhenAllRepositoriesAreHealthy() { @@ -349,11 +358,13 @@ public void testMappedFieldsForTelemetry() { } private ClusterState createClusterStateWith(RepositoriesMetadata metadata) { - var builder = ClusterState.builder(new ClusterName("test-cluster")); + var features = Set.of(HealthFeatures.SUPPORTS_EXTENDED_REPOSITORY_INDICATOR.id()); + var builder = ClusterState.builder(new ClusterName("test-cluster")) + .nodes(DiscoveryNodes.builder().add(node1).add(node2).build()) + .nodeFeatures(Map.of(node1.getId(), features, node2.getId(), features)); if (metadata != null) { builder.metadata(Metadata.builder().putCustom(RepositoriesMetadata.TYPE, metadata)); } - builder.nodes(DiscoveryNodes.builder().add(node1).add(node2).build()); return builder.build(); } @@ -361,10 +372,10 @@ private static RepositoryMetadata createRepositoryMetadata(String name, boolean return new RepositoryMetadata(name, "uuid", "s3", Settings.EMPTY, corrupted ? 
CORRUPTED_REPO_GEN : EMPTY_REPO_GEN, EMPTY_REPO_GEN); } - private static RepositoryIntegrityHealthIndicatorService createRepositoryIntegrityHealthIndicatorService(ClusterState clusterState) { + private RepositoryIntegrityHealthIndicatorService createRepositoryIntegrityHealthIndicatorService(ClusterState clusterState) { var clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(clusterState); - return new RepositoryIntegrityHealthIndicatorService(clusterService); + return new RepositoryIntegrityHealthIndicatorService(clusterService, featureService); } private SimpleHealthIndicatorDetails createDetails(int total, int corruptedCount, List corrupted, int unknown, int invalid) { From fc8e2b789730c01dd9e9d994b82a9a29beabaef7 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Mar 2024 14:01:21 +0100 Subject: [PATCH 098/107] Introduce Predicate Utilities for always true/false use-cases (#105881) Just a suggetion. I think this would save us a bit of memory here and there. We have loads of places where the always true lambdas are used with `Predicate.or/and`. Found this initially when looking into field caps performance where we used to heavily compose these but many spots in security and index name resolution gain from these predicates. The better toString also helps in some cases at least when debugging. 
--- .../org/elasticsearch/core/Predicates.java | 92 +++++++++++++++++++ .../ingest/common/KeyValueProcessor.java | 3 +- .../TransportClusterSearchShardsAction.java | 5 +- .../state/TransportClusterStateAction.java | 3 +- .../alias/get/TransportGetAliasesAction.java | 3 +- .../org/elasticsearch/action/bulk/Retry.java | 5 +- .../org/elasticsearch/action/bulk/Retry2.java | 5 +- .../action/fieldcaps/FieldCapabilities.java | 5 +- .../action/fieldcaps/ResponseRewriter.java | 3 +- .../action/search/TransportSearchAction.java | 5 +- .../org/elasticsearch/bootstrap/ESPolicy.java | 6 +- .../cluster/ClusterStateObserver.java | 7 +- .../metadata/IndexNameExpressionResolver.java | 15 ++- .../cluster/routing/IndexRoutingTable.java | 6 +- .../cluster/routing/RoutingTable.java | 5 +- .../common/network/NetworkUtils.java | 3 +- .../org/elasticsearch/common/regex/Regex.java | 5 +- .../elasticsearch/env/NodeEnvironment.java | 5 +- .../index/shard/ShardSplittingQuery.java | 8 +- .../indices/IndicesQueryCache.java | 3 +- .../elasticsearch/indices/SystemIndices.java | 5 +- .../elasticsearch/plugins/MapperPlugin.java | 3 +- .../plugins/internal/RestExtension.java | 5 +- .../aggregations/bucket/terms/LongTerms.java | 3 +- .../snapshots/SnapshotsService.java | 3 +- .../transport/ProxyConnectionStrategy.java | 3 +- .../transport/TransportService.java | 5 +- .../put/UpdateSettingsRequestTests.java | 3 +- .../cluster/metadata/MetadataTests.java | 3 +- .../decider/AllocationDecidersTests.java | 3 +- .../index/reindex/ReindexRequestTests.java | 5 +- .../index/mapper/MockFieldFilterPlugin.java | 3 +- .../test/AbstractSerializationTestCase.java | 3 +- .../test/AbstractXContentTestCase.java | 3 +- .../test/InternalTestCluster.java | 7 +- .../test/rest/yaml/section/VersionRange.java | 5 +- .../ReactiveStorageDeciderService.java | 5 +- .../ccr/action/TransportFollowInfoAction.java | 3 +- .../elasticsearch/xpack/core/ml/MlTasks.java | 7 +- .../authc/support/UserRoleMapper.java | 3 +- 
.../mapper/expressiondsl/ExpressionModel.java | 3 +- .../restriction/WorkflowsRestriction.java | 5 +- .../core/security/support/Automatons.java | 6 ++ .../core/security/support/StringMatcher.java | 9 +- .../security/support/StringMatcherTests.java | 3 +- .../xpack/ml/MlDailyMaintenanceService.java | 5 +- .../TransportEvaluateDataFrameAction.java | 3 +- .../TransportInternalInferModelAction.java | 5 +- .../action/TransportSetUpgradeModeAction.java | 7 +- .../MlMemoryAutoscalingDecider.java | 3 +- .../xpack/ml/utils/VoidChainTaskExecutor.java | 4 +- .../TransportGetServiceAccountAction.java | 3 +- .../audit/logfile/LoggingAuditTrail.java | 3 +- .../authc/service/FileTokensTool.java | 3 +- .../transform/transforms/TransformTask.java | 5 +- 55 files changed, 246 insertions(+), 93 deletions(-) create mode 100644 libs/core/src/main/java/org/elasticsearch/core/Predicates.java diff --git a/libs/core/src/main/java/org/elasticsearch/core/Predicates.java b/libs/core/src/main/java/org/elasticsearch/core/Predicates.java new file mode 100644 index 0000000000000..47ac9ef258d68 --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/core/Predicates.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.core; + +import java.util.function.Predicate; + +/** + * Utilities around predicates. 
+ */ +public enum Predicates { + ; + + @SuppressWarnings("rawtypes") + private static final Predicate NEVER = new Predicate() { + @Override + public boolean test(Object o) { + return false; + } + + @Override + public Predicate and(Predicate other) { + return this; + } + + @Override + public Predicate negate() { + return ALWAYS; + } + + @Override + public Predicate or(Predicate other) { + return other; + } + + @Override + public String toString() { + return "Predicate[NEVER]"; + } + }; + + @SuppressWarnings("rawtypes") + private static final Predicate ALWAYS = new Predicate() { + @Override + public boolean test(Object o) { + return true; + } + + @Override + public Predicate and(Predicate other) { + return other; + } + + @Override + public Predicate negate() { + return NEVER; + } + + @Override + public Predicate or(Predicate other) { + return this; + } + + @Override + public String toString() { + return "Predicate[ALWAYS]"; + } + }; + + /** + * @return a predicate that accepts all input values + * @param type of the predicate + */ + @SuppressWarnings("unchecked") + public static Predicate always() { + return (Predicate) ALWAYS; + } + + /** + * @return a predicate that rejects all input values + * @param type of the predicate + */ + @SuppressWarnings("unchecked") + public static Predicate never() { + return (Predicate) NEVER; + } +} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java index 8c90beed4d01c..0c6e37f675e1d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.core.Predicates; import 
org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; @@ -100,7 +101,7 @@ private static Consumer buildExecution( final Predicate keyFilter; if (includeKeys == null) { if (excludeKeys == null) { - keyFilter = key -> true; + keyFilter = Predicates.always(); } else { keyFilter = key -> excludeKeys.contains(key) == false; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index ccfd192246c0a..826fa453e0402 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.internal.AliasFilter; @@ -85,8 +86,8 @@ protected void masterOperation( final String[] aliases = indexNameExpressionResolver.indexAliases( clusterState, index, - aliasMetadata -> true, - dataStreamAlias -> true, + Predicates.always(), + Predicates.always(), true, indicesAndAliases ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index 29bffa3949258..c6431c7a593cd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -89,7 +90,7 @@ protected void masterOperation( final CancellableTask cancellableTask = (CancellableTask) task; final Predicate acceptableClusterStatePredicate = request.waitForMetadataVersion() == null - ? clusterState -> true + ? Predicates.always() : clusterState -> clusterState.metadata().version() >= request.waitForMetadataVersion(); final Predicate acceptableClusterStateOrFailedPredicate = request.local() diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index 3e8e6fbfde75c..4f7525c700fc2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; @@ -160,7 +161,7 @@ private static void checkSystemIndexAccess( ) { final Predicate systemIndexAccessAllowPredicate; if (systemIndexAccessLevel == SystemIndexAccessLevel.NONE) { - systemIndexAccessAllowPredicate = indexName -> false; + 
systemIndexAccessAllowPredicate = Predicates.never(); } else if (systemIndexAccessLevel == SystemIndexAccessLevel.RESTRICTED) { systemIndexAccessAllowPredicate = systemIndices.getProductSystemIndexNamePredicate(threadContext); } else { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java index 33fb81a6520cb..62ef9a08f0070 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.Scheduler; @@ -104,14 +105,14 @@ static class RetryHandler extends DelegatingActionListener true)); + addResponses(bulkItemResponses, Predicates.always()); finishHim(); } else { if (canRetry(bulkItemResponses)) { addResponses(bulkItemResponses, (r -> r.isFailed() == false)); retry(createBulkRequestForRetry(bulkItemResponses)); } else { - addResponses(bulkItemResponses, (r -> true)); + addResponses(bulkItemResponses, Predicates.always()); finishHim(); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/Retry2.java b/server/src/main/java/org/elasticsearch/action/bulk/Retry2.java index 784ba1eb95d5d..999bd6af925a6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/Retry2.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/Retry2.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.rest.RestStatus; @@ -183,7 +184,7 @@ public void onResponse(BulkResponse bulkItemResponses) { bulkItemResponses.getItems().length ); // we're done here, include all responses - addResponses(bulkItemResponses, (r -> true)); + addResponses(bulkItemResponses, Predicates.always()); listener.onResponse(getAccumulatedResponse()); } else { if (canRetry(bulkItemResponses)) { @@ -201,7 +202,7 @@ public void onResponse(BulkResponse bulkItemResponses) { bulkItemResponses.getTook(), bulkItemResponses.getItems().length ); - addResponses(bulkItemResponses, (r -> true)); + addResponses(bulkItemResponses, Predicates.always()); listener.onResponse(getAccumulatedResponse()); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index 095a5ec8f5594..856571c305615 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.InstantiatingObjectParser; @@ -567,7 +568,7 @@ private String[] filterIndices(int length, Predicate pred) { } FieldCapabilities build(boolean withIndices) { - final String[] indices = withIndices ? filterIndices(totalIndices, ic -> true) : null; + final String[] indices = withIndices ? 
filterIndices(totalIndices, Predicates.always()) : null; // Iff this field is searchable in some indices AND non-searchable in others // we record the list of non-searchable indices @@ -603,7 +604,7 @@ FieldCapabilities build(boolean withIndices) { // Collect all indices that have this field. If it is marked differently in different indices, we cannot really // make a decisions which index is "right" and which index is "wrong" so collecting all indices where this field // is present is probably the only sensible thing to do here - metricConflictsIndices = Objects.requireNonNullElseGet(indices, () -> filterIndices(totalIndices, ic -> true)); + metricConflictsIndices = Objects.requireNonNullElseGet(indices, () -> filterIndices(totalIndices, Predicates.always())); } else { metricConflictsIndices = null; } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java index 38b0287522207..c4e9b1bce6d81 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.core.Predicates; import java.util.HashMap; import java.util.Map; @@ -49,7 +50,7 @@ private static Function buildTra String[] filters, String[] allowedTypes ) { - Predicate test = ifc -> true; + Predicate test = Predicates.always(); Set objects = null; Set nestedObjects = null; if (allowedTypes.length > 0) { diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index d80322b2954c6..0922e15999e8c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -51,6 +51,7 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -198,8 +199,8 @@ private Map buildPerIndexOriginalIndices( String[] aliases = indexNameExpressionResolver.indexAliases( clusterState, index, - aliasMetadata -> true, - dataStreamAlias -> true, + Predicates.always(), + Predicates.always(), true, indicesAndAliases ); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java b/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java index 4eea930589dc7..e8244fcd576ff 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java @@ -8,6 +8,7 @@ package org.elasticsearch.bootstrap; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.SuppressForbidden; import java.io.FilePermission; @@ -201,7 +202,10 @@ public String getActions() { // from this policy file or further restrict it to code sources // that you specify, because Thread.stop() is potentially unsafe." // not even sure this method still works... 
- private static final Permission BAD_DEFAULT_NUMBER_ONE = new BadDefaultPermission(new RuntimePermission("stopThread"), p -> true); + private static final Permission BAD_DEFAULT_NUMBER_ONE = new BadDefaultPermission( + new RuntimePermission("stopThread"), + Predicates.always() + ); // default policy file states: // "allows anyone to listen on dynamic ports" diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index 74deb90ee411a..40ddafa498ecb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -33,8 +34,6 @@ public class ClusterStateObserver { public static final Predicate NON_NULL_MASTER_PREDICATE = state -> state.nodes().getMasterNode() != null; - private static final Predicate MATCH_ALL_CHANGES_PREDICATE = state -> true; - private final ClusterApplierService clusterApplierService; private final ThreadPool threadPool; private final ThreadContext contextHolder; @@ -109,11 +108,11 @@ public boolean isTimedOut() { } public void waitForNextChange(Listener listener) { - waitForNextChange(listener, MATCH_ALL_CHANGES_PREDICATE); + waitForNextChange(listener, Predicates.always()); } public void waitForNextChange(Listener listener, @Nullable TimeValue timeOutValue) { - waitForNextChange(listener, MATCH_ALL_CHANGES_PREDICATE, timeOutValue); + waitForNextChange(listener, Predicates.always(), timeOutValue); } public void waitForNextChange(Listener listener, Predicate statePredicate) { diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 0446b479b191d..4c3318d8d2f6a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; @@ -59,8 +60,6 @@ public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); - private static final Predicate ALWAYS_TRUE = s -> true; - public static final String EXCLUDED_DATA_STREAMS_KEY = "es.excluded_ds"; public static final IndexVersion SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION = IndexVersions.V_8_0_0; @@ -101,7 +100,7 @@ public String[] concreteIndexNamesWithSystemIndexAccess(ClusterState state, Indi false, request.includeDataStreams(), SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY, - ALWAYS_TRUE, + Predicates.always(), this.getNetNewSystemIndexPredicate() ); return concreteIndexNames(context, request.indices()); @@ -397,7 +396,7 @@ Index[] concreteIndices(Context context, String... 
indexExpressions) { private void checkSystemIndexAccess(Context context, Set concreteIndices) { final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); - if (systemIndexAccessPredicate == ALWAYS_TRUE) { + if (systemIndexAccessPredicate == Predicates.always()) { return; } doCheckSystemIndexAccess(context, concreteIndices, systemIndexAccessPredicate); @@ -947,11 +946,11 @@ public Predicate getSystemIndexAccessPredicate() { final SystemIndexAccessLevel systemIndexAccessLevel = getSystemIndexAccessLevel(); final Predicate systemIndexAccessLevelPredicate; if (systemIndexAccessLevel == SystemIndexAccessLevel.NONE) { - systemIndexAccessLevelPredicate = s -> false; + systemIndexAccessLevelPredicate = Predicates.never(); } else if (systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY) { systemIndexAccessLevelPredicate = getNetNewSystemIndexPredicate(); } else if (systemIndexAccessLevel == SystemIndexAccessLevel.ALL) { - systemIndexAccessLevelPredicate = ALWAYS_TRUE; + systemIndexAccessLevelPredicate = Predicates.always(); } else { // everything other than allowed should be included in the deprecation message systemIndexAccessLevelPredicate = systemIndices.getProductSystemIndexNamePredicate(threadContext); @@ -981,7 +980,7 @@ public static class Context { private final Predicate netNewSystemIndexPredicate; Context(ClusterState state, IndicesOptions options, SystemIndexAccessLevel systemIndexAccessLevel) { - this(state, options, systemIndexAccessLevel, ALWAYS_TRUE, s -> false); + this(state, options, systemIndexAccessLevel, Predicates.always(), Predicates.never()); } Context( @@ -1722,7 +1721,7 @@ public ResolverContext() { } public ResolverContext(long startTime) { - super(null, null, startTime, false, false, false, false, SystemIndexAccessLevel.ALL, name -> false, name -> false); + super(null, null, startTime, false, false, false, false, SystemIndexAccessLevel.ALL, Predicates.never(), Predicates.never()); } @Override diff 
--git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index 8fbdd3790e158..6679f17a0427b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -54,11 +55,10 @@ public class IndexRoutingTable implements SimpleDiffable { private static final List> PRIORITY_REMOVE_CLAUSES = Stream.>of( shardRouting -> shardRouting.isPromotableToPrimary() == false, - shardRouting -> true + Predicates.always() ) .flatMap( - p1 -> Stream.>of(ShardRouting::unassigned, ShardRouting::initializing, shardRouting -> true) - .map(p1::and) + p1 -> Stream.>of(ShardRouting::unassigned, ShardRouting::initializing, Predicates.always()).map(p1::and) ) .toList(); private final Index index; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 723d65fbc2a3f..855793e9e9782 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; @@ -249,7 +250,7 @@ private GroupShardsIterator allSatisfyingPredicateShardsGrouped( } public 
ShardsIterator allShards(String[] indices) { - return allShardsSatisfyingPredicate(indices, shardRouting -> true, false); + return allShardsSatisfyingPredicate(indices, Predicates.always(), false); } public ShardsIterator allActiveShards(String[] indices) { @@ -257,7 +258,7 @@ public ShardsIterator allActiveShards(String[] indices) { } public ShardsIterator allShardsIncludingRelocationTargets(String[] indices) { - return allShardsSatisfyingPredicate(indices, shardRouting -> true, true); + return allShardsSatisfyingPredicate(indices, Predicates.always(), true); } private ShardsIterator allShardsSatisfyingPredicate( diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkUtils.java b/server/src/main/java/org/elasticsearch/common/network/NetworkUtils.java index f7cfff8402304..b2602b9c4f9d0 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkUtils.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkUtils.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; +import org.elasticsearch.core.Predicates; import java.io.IOException; import java.net.Inet4Address; @@ -188,7 +189,7 @@ static InetAddress[] getGlobalAddresses() throws IOException { /** Returns all addresses (any scope) for interfaces that are up. 
* This is only used to pick a publish address, when the user set network.host to a wildcard */ public static InetAddress[] getAllAddresses() throws IOException { - return filterAllAddresses(address -> true, "no up-and-running addresses found"); + return filterAllAddresses(Predicates.always(), "no up-and-running addresses found"); } static Optional maybeGetInterfaceByName(List networkInterfaces, String name) { diff --git a/server/src/main/java/org/elasticsearch/common/regex/Regex.java b/server/src/main/java/org/elasticsearch/common/regex/Regex.java index 532fc2ae9a019..039f484f1ebca 100644 --- a/server/src/main/java/org/elasticsearch/common/regex/Regex.java +++ b/server/src/main/java/org/elasticsearch/common/regex/Regex.java @@ -14,6 +14,7 @@ import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Predicates; import java.util.ArrayList; import java.util.Arrays; @@ -102,12 +103,12 @@ public static Automaton simpleMatchToAutomaton(String... patterns) { */ public static Predicate simpleMatcher(String... 
patterns) { if (patterns == null || patterns.length == 0) { - return str -> false; + return Predicates.never(); } boolean hasWildcard = false; for (String pattern : patterns) { if (isMatchAllPattern(pattern)) { - return str -> true; + return Predicates.always(); } if (isSimpleMatchPattern(pattern)) { hasWildcard = true; diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 1d8a9ef1ce1c4..b246802d06fee 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -38,6 +38,7 @@ import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; @@ -1119,7 +1120,7 @@ public Path[] availableShardPaths(ShardId shardId) { * Returns all folder names in ${data.paths}/indices folder */ public Set availableIndexFolders() throws IOException { - return availableIndexFolders(p -> false); + return availableIndexFolders(Predicates.never()); } /** @@ -1147,7 +1148,7 @@ public Set availableIndexFolders(Predicate excludeIndexPathIdsPr * @throws IOException if an I/O exception occurs traversing the filesystem */ public Set availableIndexFoldersForPath(final DataPath dataPath) throws IOException { - return availableIndexFoldersForPath(dataPath, p -> false); + return availableIndexFoldersForPath(dataPath, Predicates.never()); } /** diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index ca9de756ca211..389485ac4eaf2 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ 
b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.IndexRouting; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.mapper.IdFieldMapper; @@ -140,7 +141,12 @@ public Scorer scorer(LeafReaderContext context) throws IOException { * of the document that contains them. */ FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); - findSplitDocs(RoutingFieldMapper.NAME, ref -> false, leafReader, maybeWrapConsumer.apply(hasRoutingValue::set)); + findSplitDocs( + RoutingFieldMapper.NAME, + Predicates.never(), + leafReader, + maybeWrapConsumer.apply(hasRoutingValue::set) + ); IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { if (hasRoutingValue.get(docId) == false) { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index 7394e5eb89458..a40a5ab2e2fe8 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.shard.ShardId; @@ -78,7 +79,7 @@ public IndicesQueryCache(Settings settings) { logger.debug("using [node] query cache with size [{}] max filter count [{}]", size, count); if (INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.get(settings)) { // Use the default 
skip_caching_factor (i.e., 10f) in Lucene - cache = new ElasticsearchLRUQueryCache(count, size.getBytes(), context -> true, 10f); + cache = new ElasticsearchLRUQueryCache(count, size.getBytes(), Predicates.always(), 10f); } else { cache = new ElasticsearchLRUQueryCache(count, size.getBytes()); } diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index f23f28e4c1047..3261ac83a7e67 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -33,6 +33,7 @@ import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.plugins.SystemIndexPlugin; @@ -384,11 +385,11 @@ static SystemIndexDescriptor findMatchingDescriptor(SystemIndexDescriptor[] inde public Predicate getProductSystemIndexNamePredicate(ThreadContext threadContext) { final String product = threadContext.getHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY); if (product == null) { - return name -> false; + return Predicates.never(); } final CharacterRunAutomaton automaton = productToSystemIndicesMatcher.get(product); if (automaton == null) { - return name -> false; + return Predicates.never(); } return automaton::run; } diff --git a/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java b/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java index 5124c94e545c0..401c014488f88 100644 --- a/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.plugins; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.mapper.Mapper; import 
org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.RuntimeField; @@ -69,7 +70,7 @@ default Function> getFieldFilter() { * The default field predicate applied, which doesn't filter anything. That means that by default get mappings, get index * get field mappings and field capabilities API will return every field that's present in the mappings. */ - Predicate NOOP_FIELD_PREDICATE = field -> true; + Predicate NOOP_FIELD_PREDICATE = Predicates.always(); /** * The default field filter applied, which doesn't filter anything. That means that by default get mappings, get index diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java b/server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java index 4864e6bf31222..175d10a096b55 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java +++ b/server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java @@ -8,6 +8,7 @@ package org.elasticsearch.plugins.internal; +import org.elasticsearch.core.Predicates; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.action.cat.AbstractCatAction; @@ -38,12 +39,12 @@ static RestExtension allowAll() { return new RestExtension() { @Override public Predicate getCatActionsFilter() { - return action -> true; + return Predicates.always(); } @Override public Predicate getActionsFilter() { - return handler -> true; + return Predicates.always(); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java index 45067208cbdd2..b0af2c3d4e618 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Predicates; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -208,7 +209,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont } else if (format == DocValueFormat.UNSIGNED_LONG_SHIFTED) { needsPromoting = docFormat -> docFormat == DocValueFormat.RAW; } else { - needsPromoting = docFormat -> false; + needsPromoting = Predicates.never(); } return new AggregatorReducer() { diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index a0782fa8814cd..d505a6ded4809 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -73,6 +73,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; @@ -2266,7 +2267,7 @@ public static IndexVersion minCompatibleVersion( IndexVersion minCompatVersion = minNodeVersion; final Collection snapshotIds = repositoryData.getSnapshotIds(); for (SnapshotId snapshotId : snapshotIds.stream() - .filter(excluded == null ? sn -> true : Predicate.not(excluded::contains)) + .filter(excluded == null ? 
Predicates.always() : Predicate.not(excluded::contains)) .toList()) { final IndexVersion known = repositoryData.getVersion(snapshotId); // If we don't have the version cached in the repository data yet we load it from the snapshot info blobs diff --git a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java index cfb6f872ce748..b0c4a6cd95156 100644 --- a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -182,7 +183,7 @@ public class ProxyConnectionStrategy extends RemoteConnectionStrategy { connectionManager.getCredentialsManager() ), actualProfile.getHandshakeTimeout(), - cn -> true, + Predicates.always(), listener.map(resp -> { ClusterName remote = resp.getClusterName(); if (remoteClusterName.compareAndSet(null, remote)) { diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 7f1d63b092cdb..2ade579f216e4 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -37,6 +37,7 @@ import org.elasticsearch.core.Booleans; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; @@ -356,7 +357,7 @@ 
protected void doStop() { // but there may still be pending handlers for node-local requests since this connection is not closed, and we may also // (briefly) track handlers for requests which are sent concurrently with stopping even though the underlying connection is // now closed. We complete all these outstanding handlers here: - for (final Transport.ResponseContext holderToNotify : responseHandlers.prune(h -> true)) { + for (final Transport.ResponseContext holderToNotify : responseHandlers.prune(Predicates.always())) { try { final TransportResponseHandler handler = holderToNotify.handler(); final var targetNode = holderToNotify.connection().getNode(); @@ -499,7 +500,7 @@ public void connectToNode( public ConnectionManager.ConnectionValidator connectionValidator(DiscoveryNode node) { return (newConnection, actualProfile, listener) -> { // We don't validate cluster names to allow for CCS connections. - handshake(newConnection, actualProfile.getHandshakeTimeout(), cn -> true, listener.map(resp -> { + handshake(newConnection, actualProfile.getHandshakeTimeout(), Predicates.always(), listener.map(resp -> { final DiscoveryNode remote = resp.discoveryNode; if (node.equals(remote) == false) { throw new ConnectTransportException(node, "handshake failed. 
unexpected remote node " + remote); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestTests.java index 48ab2b0802616..36e347204d1cc 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Predicates; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.xcontent.ToXContent; @@ -110,7 +111,7 @@ protected Predicate getRandomFieldsExcludeFilter() { if (enclosedSettings) { return field -> field.startsWith("settings"); } - return field -> true; + return Predicates.always(); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 07ccf0e8f34e7..1e35a40dedc17 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -783,7 +784,7 @@ public void testFindMappingsWithFilters() throws IOException { && field.equals("address.location") == false; } if (index.equals("index2")) { - return 
field -> false; + return Predicates.never(); } return MapperPlugin.NOOP_FIELD_PREDICATE; }, Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java index ac3984a2ded21..4fe07756a1d6b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecidersTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; @@ -113,7 +114,7 @@ private static List generateDecisions(Decision mandatory, Supplier decisions) { - return collectToMultiDecision(decisions, ignored -> true); + return collectToMultiDecision(decisions, Predicates.always()); } private static Decision.Multi collectToMultiDecision(List decisions, Predicate filter) { diff --git a/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java index 65c060aa9005a..c8cce9a9910e7 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Predicates; import org.elasticsearch.index.query.TermQueryBuilder; import 
org.elasticsearch.search.SearchModule; import org.elasticsearch.search.slice.SliceBuilder; @@ -115,7 +116,7 @@ protected ReindexRequest createTestInstance() { @Override protected ReindexRequest doParseInstance(XContentParser parser) throws IOException { - return ReindexRequest.fromXContent(parser, nf -> false); + return ReindexRequest.fromXContent(parser, Predicates.never()); } @Override @@ -403,7 +404,7 @@ private ReindexRequest parseRequestWithSourceIndices(Object sourceIndices) throw request = BytesReference.bytes(b); } try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) { - return ReindexRequest.fromXContent(p, nf -> false); + return ReindexRequest.fromXContent(p, Predicates.never()); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java index 21c6b50809ea9..16cb0b4656fcf 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.core.Predicates; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; @@ -19,6 +20,6 @@ public class MockFieldFilterPlugin extends Plugin implements MapperPlugin { @Override public Function> getFieldFilter() { // this filter doesn't filter any field out, but it's used to exercise the code path executed when the filter is not no-op - return index -> field -> true; + return index -> Predicates.always(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java index 238f523872f83..922f2ba74dcf2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -126,7 +127,7 @@ protected boolean supportsUnknownFields() { * Returns a predicate that given the field name indicates whether the field has to be excluded from random fields insertion or not */ protected Predicate getRandomFieldsExcludeFilter() { - return field -> false; + return Predicates.never(); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index 4df1e745f3bf4..848ec3c2f1738 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Predicates; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -326,7 +327,7 @@ protected boolean assertToXContentEquivalence() { * Returns a predicate that given the field name indicates whether the field has to be excluded from random fields insertion or not */ protected Predicate getRandomFieldsExcludeFilter() { - return field -> false; + return Predicates.never(); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java 
b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 38c38e719138e..16320b3b26301 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -650,7 +651,7 @@ private NodeAndClient getOrBuildRandomNode() { } private NodeAndClient getRandomNodeAndClient() { - return getRandomNodeAndClient(nc -> true); + return getRandomNodeAndClient(Predicates.always()); } private synchronized NodeAndClient getRandomNodeAndClient(Predicate predicate) { @@ -1621,7 +1622,7 @@ private synchronized T getInstance(Class clazz, Predicate * Returns a reference to a random nodes instances of the given class >T< */ public T getInstance(Class clazz) { - return getInstance(clazz, nc -> true); + return getInstance(clazz, Predicates.always()); } private static T getInstanceFromNode(Class clazz, Node node) { @@ -1990,7 +1991,7 @@ public String getMasterName(@Nullable String viaNode) { * @return the name of a random node in a cluster */ public String getRandomNodeName() { - return getNodeNameThat(ignored -> true); + return getNodeNameThat(Predicates.always()); } /** diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/VersionRange.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/VersionRange.java index 20b9708c5ac25..ab5377532bbbc 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/VersionRange.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/VersionRange.java @@ -9,6 +9,7 @@ 
import org.elasticsearch.Build; import org.elasticsearch.Version; +import org.elasticsearch.core.Predicates; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.rest.ESRestTestCase; @@ -23,9 +24,9 @@ class VersionRange { private VersionRange() {} - static final Predicate> NEVER = v -> false; + static final Predicate> NEVER = Predicates.never(); - static final Predicate> ALWAYS = v -> true; + static final Predicate> ALWAYS = Predicates.always(); static final Predicate> CURRENT = versions -> versions.size() == 1 && versions.contains(Build.current().version()); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index ffa3a7308da90..2379e5f8e9380 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -48,6 +48,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -199,11 +200,11 @@ static String message(long unassignedBytes, long assignedBytes) { } static boolean isDiskOnlyNoDecision(Decision decision) { - return singleNoDecision(decision, single -> true).map(DiskThresholdDecider.NAME::equals).orElse(false); + return singleNoDecision(decision, Predicates.always()).map(DiskThresholdDecider.NAME::equals).orElse(false); } static boolean isResizeOnlyNoDecision(Decision decision) { - return singleNoDecision(decision, single -> 
true).map(ResizeAllocationDecider.NAME::equals).orElse(false); + return singleNoDecision(decision, Predicates.always()).map(ResizeAllocationDecider.NAME::equals).orElse(false); } static boolean isFilterTierOnlyDecision(Decision decision, IndexMetadata indexMetadata) { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java index 46c44c9b2392b..a66a79a0f7d76 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Predicates; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -89,7 +90,7 @@ static List getFollowInfos(List concreteFollowerIndices, C if (ccrCustomData != null) { Optional result; if (persistentTasks != null) { - result = persistentTasks.findTasks(ShardFollowTask.NAME, task -> true) + result = persistentTasks.findTasks(ShardFollowTask.NAME, Predicates.always()) .stream() .map(task -> (ShardFollowTask) task.getParams()) .filter(shardFollowTask -> index.equals(shardFollowTask.getFollowShardId().getIndexName())) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java index 6209ead0cc6a1..6281f656954e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java @@ -9,6 +9,7 @@ import 
org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.persistent.PersistentTasksClusterService; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -308,7 +309,7 @@ public static Collection> openJo return Collections.emptyList(); } - return tasks.findTasks(JOB_TASK_NAME, task -> true); + return tasks.findTasks(JOB_TASK_NAME, Predicates.always()); } public static Collection> datafeedTasksOnNode( @@ -360,7 +361,7 @@ public static Collection> snapsh return Collections.emptyList(); } - return tasks.findTasks(JOB_SNAPSHOT_UPGRADE_TASK_NAME, task -> true); + return tasks.findTasks(JOB_SNAPSHOT_UPGRADE_TASK_NAME, Predicates.always()); } public static Collection> snapshotUpgradeTasksOnNode( @@ -439,7 +440,7 @@ public static Set startedDatafeedIds(@Nullable PersistentTasksCustomMeta return Collections.emptySet(); } - return tasks.findTasks(DATAFEED_TASK_NAME, task -> true) + return tasks.findTasks(DATAFEED_TASK_NAME, Predicates.always()) .stream() .map(t -> t.getId().substring(DATAFEED_TASK_ID_PREFIX.length())) .collect(Collectors.toSet()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java index 5addca91902cd..96fb7ff4e6f41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java @@ -15,6 +15,7 @@ import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import 
org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; @@ -87,7 +88,7 @@ public ExpressionModel asModel() { groups, groups.stream().>map(g -> new DistinguishedNamePredicate(g, dnNormalizer)) .reduce(Predicate::or) - .orElse(fieldValue -> false) + .orElse(Predicates.never()) ); metadata.keySet().forEach(k -> model.defineField("metadata." + k, metadata.get(k))); model.defineField("realm.name", realm.name()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionModel.java index 3251c54945335..9d25e6830bbbd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionModel.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionModel.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Numbers; +import org.elasticsearch.core.Predicates; import java.util.Collection; import java.util.Collections; @@ -100,7 +101,7 @@ static Predicate buildPredicate(Object object) { return ((Collection) object).stream() .map(element -> buildPredicate(element)) .reduce((a, b) -> a.or(b)) - .orElse(fieldValue -> false); + .orElse(Predicates.never()); } throw new IllegalArgumentException("Unsupported value type " + object.getClass()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/restriction/WorkflowsRestriction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/restriction/WorkflowsRestriction.java index f1d9d694304e5..811c6b36d4f7e 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/restriction/WorkflowsRestriction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/restriction/WorkflowsRestriction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.security.authz.restriction; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import java.util.Set; import java.util.function.Predicate; @@ -26,10 +27,10 @@ public WorkflowsRestriction(Set names) { this.names = names; if (names == null) { // No restriction, all workflows are allowed - this.predicate = name -> true; + this.predicate = Predicates.always(); } else if (names.isEmpty()) { // Empty restriction, no workflow is allowed - this.predicate = name -> false; + this.predicate = Predicates.never(); } else { this.predicate = name -> { if (name == null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index 5d7a4b279298c..f601aa144aa00 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import java.util.ArrayList; @@ -312,6 +313,11 @@ static int getMaxDeterminizedStates() { } private static Predicate predicate(Automaton automaton, final String toString) { + if (automaton == MATCH_ALL) { + return Predicates.always(); + } else if (automaton == EMPTY) { + return Predicates.never(); + } CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates); return new 
Predicate() { @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java index 235fb3635bac6..ede11fe157487 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Predicates; import java.util.ArrayList; import java.util.Collection; @@ -34,9 +35,7 @@ */ public class StringMatcher implements Predicate { - private static final StringMatcher MATCH_NOTHING = new StringMatcher("(empty)", s -> false); - - protected static final Predicate ALWAYS_TRUE_PREDICATE = s -> true; + private static final StringMatcher MATCH_NOTHING = new StringMatcher("(empty)", Predicates.never()); private final String description; private final Predicate predicate; @@ -70,7 +69,7 @@ public boolean test(String s) { } public boolean isTotal() { - return predicate == ALWAYS_TRUE_PREDICATE; + return predicate == Predicates.always(); } // For testing @@ -130,7 +129,7 @@ public StringMatcher build() { final String description = describe(allText); if (nonExactMatch.contains("*")) { - return new StringMatcher(description, ALWAYS_TRUE_PREDICATE); + return new StringMatcher(description, Predicates.always()); } if (exactMatch.isEmpty()) { return new StringMatcher(description, buildAutomataPredicate(nonExactMatch)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java index 1582cf3404bdc..2e31f760f6db2 100644 
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Predicates; import org.elasticsearch.test.ESTestCase; import java.util.List; @@ -49,7 +50,7 @@ public void testMatchAllWildcard() throws Exception { assertMatch(matcher, randomAlphaOfLengthBetween(i, 20)); } - assertThat(matcher.getPredicate(), sameInstance(StringMatcher.ALWAYS_TRUE_PREDICATE)); + assertThat(matcher.getPredicate(), sameInstance(Predicates.always())); } public void testSingleWildcard() throws Exception { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java index 8c46f7229c655..71469fccc0032 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -288,8 +289,8 @@ public void triggerDeleteJobsInStateDeletingWithoutDeletionTask(ActionListener> chainTaskExecutor = new TypedChainTaskExecutor<>( EsExecutors.DIRECT_EXECUTOR_SERVICE, - unused -> true, - unused -> true + Predicates.always(), + Predicates.always() ); for (String jobId : jobsInStateDeletingWithoutDeletionTask) { DeleteJobAction.Request request = new DeleteJobAction.Request(jobId); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java index 61db7f683f0f3..92c9909441b14 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Predicates; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -128,7 +129,7 @@ private static final class EvaluationExecutor extends TypedChainTaskExecutor true, unused -> true); + super(threadPool.generic(), Predicates.always(), Predicates.always()); this.client = client; this.parameters = parameters; this.request = request; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index 6a8dca8e2776b..d54cac9dca496 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.license.License; @@ -175,9 +176,9 @@ private void getModelAndInfer( 
TypedChainTaskExecutor typedChainTaskExecutor = new TypedChainTaskExecutor<>( EsExecutors.DIRECT_EXECUTOR_SERVICE, // run through all tasks - r -> true, + Predicates.always(), // Always fail immediately and return an error - ex -> true + Predicates.always() ); request.getObjectsToInfer().forEach(stringObjectMap -> typedChainTaskExecutor.add(chainedTask -> { if (task.isCancelled()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java index 07b556cf9a989..4f4eee6e5c597 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.persistent.PersistentTasksClusterService; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -300,7 +301,7 @@ private void unassignPersistentTasks( TypedChainTaskExecutor> chainTaskExecutor = new TypedChainTaskExecutor<>( executor, - r -> true, + Predicates.always(), // Another process could modify tasks and thus we cannot find them via the allocation_id and name // If the task was removed from the node, all is well // We handle the case of allocation_id changing later in this transport class by timing out waiting for task completion @@ -330,8 +331,8 @@ private void isolateDatafeeds( logger.info("Isolating datafeeds: " + datafeedsToIsolate.toString()); TypedChainTaskExecutor isolateDatafeedsExecutor = new TypedChainTaskExecutor<>( executor, - r -> true, - ex -> true + Predicates.always(), + Predicates.always() ); 
datafeedsToIsolate.forEach(datafeedId -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDecider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDecider.java index 4ff7e66d296d0..dfe0e557f749d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDecider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDecider.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentElasticsearchExtension; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.logging.LogManager; @@ -913,7 +914,7 @@ private static Collection true) + return tasksCustomMetadata.findTasks(MlTasks.DATAFEED_TASK_NAME, Predicates.always()) .stream() .map(p -> (PersistentTasksCustomMetadata.PersistentTask) p) .toList(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutor.java index d5d7767a7e7a1..f7c46222d4471 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutor.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.ml.utils; +import org.elasticsearch.core.Predicates; + import java.util.concurrent.ExecutorService; import java.util.function.Predicate; @@ -16,7 +18,7 @@ public class VoidChainTaskExecutor extends TypedChainTaskExecutor { public VoidChainTaskExecutor(ExecutorService executorService, boolean shortCircuit) { - this(executorService, (a) -> true, (e) -> shortCircuit); + this(executorService, Predicates.always(), shortCircuit 
? Predicates.always() : Predicates.never()); } VoidChainTaskExecutor( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountAction.java index f8a4a8a449f83..372a550eedbc9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Predicates; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.service.GetServiceAccountAction; @@ -38,7 +39,7 @@ public TransportGetServiceAccountAction(TransportService transportService, Actio @Override protected void doExecute(Task task, GetServiceAccountRequest request, ActionListener listener) { - Predicate filter = v -> true; + Predicate filter = Predicates.always(); if (request.getNamespace() != null) { filter = filter.and(v -> v.id().namespace().equals(request.getNamespace())); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index 87c372f561757..01104806c4a1c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.util.Maps; 
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.node.Node; @@ -1908,7 +1909,7 @@ Predicate ignorePredicate() { } private static Predicate buildIgnorePredicate(Map policyMap) { - return policyMap.values().stream().map(EventFilterPolicy::ignorePredicate).reduce(x -> false, (x, y) -> x.or(y)); + return policyMap.values().stream().map(EventFilterPolicy::ignorePredicate).reduce(Predicates.never(), Predicate::or); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/FileTokensTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/FileTokensTool.java index 51adcab5c3c13..14ca1663e16a5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/FileTokensTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/FileTokensTool.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.cli.EnvironmentAwareCommand; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Predicates; import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -132,7 +133,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce + "]" ); } - Predicate filter = k -> true; + Predicate filter = Predicates.always(); if (args.size() == 1) { final String principal = args.get(0); if (false == ServiceAccountService.isServiceAccountPrincipal(principal)) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java index b0435a08a4187..8a78be8417020 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -607,7 +608,7 @@ public static PersistentTask getTransformTask(String transformId, ClusterStat } public static Collection> findAllTransformTasks(ClusterState clusterState) { - return findTransformTasks(task -> true, clusterState); + return findTransformTasks(Predicates.always(), clusterState); } public static Collection> findTransformTasks(Set transformIds, ClusterState clusterState) { @@ -616,7 +617,7 @@ public static Collection> findTransformTasks(Set trans public static Collection> findTransformTasks(String transformIdPattern, ClusterState clusterState) { Predicate> taskMatcher = transformIdPattern == null - || Strings.isAllOrWildcard(transformIdPattern) ? t -> true : t -> { + || Strings.isAllOrWildcard(transformIdPattern) ? 
Predicates.always() : t -> { TransformTaskParams transformParams = (TransformTaskParams) t.getParams(); return Regex.simpleMatch(transformIdPattern, transformParams.getId()); }; From c57d96a853de9674254310a38e64fdc475ddc65f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 4 Mar 2024 08:16:03 -0500 Subject: [PATCH 099/107] Test mute for #105839 (#105902) mute for: https://github.com/elastic/elasticsearch/issues/105839 --- .../search/aggregations/bucket/RandomSamplerIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index 53075e31cd6f9..c9a6cfaf754c6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -85,6 +85,7 @@ public void setupSuiteScopeCluster() throws Exception { ensureSearchable(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105839") public void testRandomSamplerConsistentSeed() { double[] sampleMonotonicValue = new double[1]; double[] sampleNumericValue = new double[1]; From fdfc08a257388a54a435678696237ccbc77be504 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 4 Mar 2024 15:31:33 +0100 Subject: [PATCH 100/107] Grow buckets on GlobalAggregator and RandomSamplerAggregator eagerly (#105762) --- .../search/aggregations/bucket/global/GlobalAggregator.java | 3 ++- .../bucket/sampler/random/RandomSamplerAggregator.java | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 3beec89853b76..ce3031d4cddf8 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -43,10 +43,11 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, if (scorer == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } + grow(1); scorer.score(new LeafCollector() { @Override public void collect(int doc) throws IOException { - collectBucket(sub, doc, 0); + collectExistingBucket(sub, doc, 0); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java index a279b8270cd57..276e0bbf300d2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java @@ -101,10 +101,11 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } // No sampling is being done, collect all docs if (probability >= 1.0) { + grow(1); return new LeafBucketCollector() { @Override public void collect(int doc, long owningBucketOrd) throws IOException { - collectBucket(sub, doc, 0); + collectExistingBucket(sub, doc, 0); } }; } @@ -117,11 +118,12 @@ public void collect(int doc, long owningBucketOrd) throws IOException { final DocIdSetIterator docIt = scorer.iterator(); final Bits liveDocs = aggCtx.getLeafReaderContext().reader().getLiveDocs(); try { + grow(1); // Iterate every document provided by the scorer iterator for (int docId = docIt.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docIt.nextDoc()) { // If liveDocs is null, that means that every doc is a live doc, no need to check if it has been deleted or not if (liveDocs == null || 
liveDocs.get(docIt.docID())) { - collectBucket(sub, docIt.docID(), 0); + collectExistingBucket(sub, docIt.docID(), 0); } } // This collector could throw `CollectionTerminatedException` if the last leaf collector has stopped collecting From 0732628eaa91c183b3b5851eeff68d7394100c8e Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Mar 2024 14:37:49 +0000 Subject: [PATCH 101/107] Use constant predicate in `buildAfterPredicate` (#105905) Relates #105881 --- .../cluster/snapshots/get/TransportGetSnapshotsAction.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index f7b5fec8a2dd5..ce3446317400d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Predicates; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; @@ -545,8 +546,7 @@ private Comparator buildComparator() { private Predicate buildAfterPredicate() { if (after == null) { - // TODO use constant when https://github.com/elastic/elasticsearch/pull/105881 merged - return snapshotInfo -> true; + return Predicates.always(); } assert offset == 0 : "can't combine after and offset but saw [" + after + "] and offset [" + offset + "]"; From 6ae9dbfda7d71ae3f1bd2bddf9334d37b3294632 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 4 Mar 
2024 16:43:41 +0100 Subject: [PATCH 102/107] [DOCS] Adds cohere service example to the inference API tutorial (#105904) Co-authored-by: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> --- .../semantic-search-inference.asciidoc | 205 ++---------------- .../infer-api-ingest-pipeline-widget.asciidoc | 39 ++++ .../infer-api-ingest-pipeline.asciidoc | 63 ++++++ .../infer-api-mapping-widget.asciidoc | 39 ++++ .../inference-api/infer-api-mapping.asciidoc | 71 ++++++ .../infer-api-reindex-widget.asciidoc | 39 ++++ .../inference-api/infer-api-reindex.asciidoc | 55 +++++ .../infer-api-requirements-widget.asciidoc | 39 ++++ .../infer-api-requirements.asciidoc | 14 ++ .../infer-api-search-widget.asciidoc | 39 ++++ .../inference-api/infer-api-search.asciidoc | 139 ++++++++++++ .../infer-api-task-widget.asciidoc | 39 ++++ .../inference-api/infer-api-task.asciidoc | 56 +++++ 13 files changed, 654 insertions(+), 183 deletions(-) create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc create mode 100644 docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc create mode 100644 
docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index 249fddce9c416..b9bb36b21ea12 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -4,18 +4,21 @@ Semantic search with the {infer} API ++++ -The instructions in this tutorial shows you how to use the {infer} API with the -Open AI service to perform semantic search on your data. The following example -uses OpenAI's `text-embedding-ada-002` second generation embedding model. You -can use any OpenAI models, they are all supported by the {infer} API. +The instructions in this tutorial shows you how to use the {infer} API with +various services to perform semantic search on your data. The following examples +use Cohere's `embed-english-light-v3.0` model and OpenAI's +`text-embedding-ada-002` second generation embedding model. You can use any +Cohere and OpenAI models, they are all supported by the {infer} API. + +Click the name of the service you want to use on any of the widgets below to +review the corresponding instructions. [discrete] -[[infer-openai-requirements]] +[[infer-service-requirements]] ==== Requirements -An https://openai.com/[OpenAI account] is required to use the {infer} API with -the OpenAI service. +include::{es-repo-dir}/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc[] [discrete] @@ -24,113 +27,30 @@ the OpenAI service. 
Create the {infer} task by using the <>: -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/openai_embeddings <1> -{ - "service": "openai", - "service_settings": { - "api_key": "" <2> - }, - "task_settings": { - "model": "text-embedding-ada-002" <3> - } -} ------------------------------------------------------------- -// TEST[skip:TBD] -<1> The task type is `text_embedding` in the path. -<2> The API key of your OpenAI account. You can find your OpenAI API keys in -your OpenAI account under the -https://platform.openai.com/api-keys[API keys section]. You need to provide -your API key only once. The <> does not return your API -key. -<3> The name of the embedding model to use. You can find the list of OpenAI -embedding models -https://platform.openai.com/docs/guides/embeddings/embedding-models[here]. +include::{es-repo-dir}/tab-widgets/inference-api/infer-api-task-widget.asciidoc[] [discrete] -[[infer-openai-mappings]] +[[infer-service-mappings]] ==== Create the index mapping The mapping of the destination index - the index that contains the embeddings that the model will create based on your input text - must be created. The destination index must have a field with the <> -field type to index the output of the OpenAI model. +field type to index the output of the used model. -[source,console] --------------------------------------------------- -PUT openai-embeddings -{ - "mappings": { - "properties": { - "content_embedding": { <1> - "type": "dense_vector", <2> - "dims": 1536, <3> - "element_type": "float", - "similarity": "dot_product" <4> - }, - "content": { <5> - "type": "text" <6> - } - } - } -} --------------------------------------------------- -<1> The name of the field to contain the generated tokens. It must be refrenced -in the {infer} pipeline configuration in the next step. -<2> The field to contain the tokens is a `dense_vector` field. -<3> The output dimensions of the model. 
Find this value in the -https://platform.openai.com/docs/guides/embeddings/embedding-models[OpenAI documentation] -of the model you use. -<4> The faster` dot_product` function can be used to calculate similarity -because OpenAI embeddings are normalised to unit length. You can check the -https://platform.openai.com/docs/guides/embeddings/which-distance-function-should-i-use[OpenAI docs] -about which similarity function to use. -<5> The name of the field from which to create the sparse vector representation. -In this example, the name of the field is `content`. It must be referenced in -the {infer} pipeline configuration in the next step. -<6> The field type which is text in this example. +include::{es-repo-dir}/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc[] [discrete] -[[infer-openai-inference-ingest-pipeline]] +[[infer-service-inference-ingest-pipeline]] ==== Create an ingest pipeline with an inference processor Create an <> with an -<> and use the OpenAI model you created -above to infer against the data that is being ingested in the -pipeline. +<> and use the model you created above to +infer against the data that is being ingested in the pipeline. -[source,console] --------------------------------------------------- -PUT _ingest/pipeline/openai_embeddings -{ - "processors": [ - { - "inference": { - "model_id": "openai_embeddings", <1> - "input_output": { <2> - "input_field": "content", - "output_field": "content_embedding" - } - } - } - ] -} --------------------------------------------------- -<1> The name of the inference model you created by using the -<>. -<2> Configuration object that defines the `input_field` for the {infer} process -and the `output_field` that will contain the {infer} results. 
- -//// -[source,console] ----- -DELETE _ingest/pipeline/openai_embeddings ----- -// TEST[continued] -//// +include::{es-repo-dir}/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc[] [discrete] @@ -157,32 +77,10 @@ you can see an index named `test-data` with 182469 documents. [[reindexing-data-infer]] ==== Ingest the data through the {infer} ingest pipeline -Create the embeddings from the text by reindexing the data throught the {infer} -pipeline that uses the OpenAI model as the inference model. +Create the embeddings from the text by reindexing the data through the {infer} +pipeline that uses the chosen model as the inference model. -[source,console] ----- -POST _reindex?wait_for_completion=false -{ - "source": { - "index": "test-data", - "size": 50 <1> - }, - "dest": { - "index": "openai-embeddings", - "pipeline": "openai_embeddings" - } -} ----- -// TEST[skip:TBD] -<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller -number makes the update of the reindexing process quicker which enables you to -follow the progress closely and detect errors early. - -NOTE: The -https://platform.openai.com/account/limits[rate limit of your OpenAI account] -may affect the throughput of the reindexing process. If this happens, change -`size` to `3` or a similar value in magnitude. +include::{es-repo-dir}/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc[] The call returns a task ID to monitor the progress: @@ -214,63 +112,4 @@ provide the query text and the model you have used to create the embeddings. NOTE: If you cancelled the reindexing process, you run the query only a part of the data which affects the quality of your results. 
-[source,console] --------------------------------------------------- -GET openai-embeddings/_search -{ - "knn": { - "field": "content_embedding", - "query_vector_builder": { - "text_embedding": { - "model_id": "openai_embeddings", - "model_text": "Calculate fuel cost" - } - }, - "k": 10, - "num_candidates": 100 - }, - "_source": [ - "id", - "content" - ] -} --------------------------------------------------- -// TEST[skip:TBD] - -As a result, you receive the top 10 documents that are closest in meaning to the -query from the `openai-embeddings` index sorted by their proximity to the query: - -[source,consol-result] --------------------------------------------------- -"hits": [ - { - "_index": "openai-embeddings", - "_id": "DDd5OowBHxQKHyc3TDSC", - "_score": 0.83704096, - "_source": { - "id": 862114, - "body": "How to calculate fuel cost for a road trip. By Tara Baukus Mello • Bankrate.com. Dear Driving for Dollars, My family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost.It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes.y family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost. It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes." - } - }, - { - "_index": "openai-embeddings", - "_id": "ajd5OowBHxQKHyc3TDSC", - "_score": 0.8345704, - "_source": { - "id": 820622, - "body": "Home Heating Calculator. Typically, approximately 50% of the energy consumed in a home annually is for space heating. 
When deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important.This calculator can help you estimate the cost of fuel for different heating appliances.hen deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important. This calculator can help you estimate the cost of fuel for different heating appliances." - } - }, - { - "_index": "openai-embeddings", - "_id": "Djd5OowBHxQKHyc3TDSC", - "_score": 0.8327426, - "_source": { - "id": 8202683, - "body": "Fuel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel.If you are paying $4 per gallon, the trip would cost you $200.Most boats have much larger gas tanks than cars.uel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel." - } - }, - (...) - ] --------------------------------------------------- -// NOTCONSOLE +include::{es-repo-dir}/tab-widgets/inference-api/infer-api-search-widget.asciidoc[] \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc new file mode 100644 index 0000000000000..44d2f60966caa --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc @@ -0,0 +1,39 @@ +++++ +
    +
    + + +
    +
    +++++ + +include::infer-api-ingest-pipeline.asciidoc[tag=cohere] + +++++ +
    + +
    +++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc new file mode 100644 index 0000000000000..a5a1910e8f8ef --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc @@ -0,0 +1,63 @@ +//// + +[source,console] +---- +DELETE _ingest/pipeline/*_embeddings +---- +// TEST +// TEARDOWN + +//// + +// tag::cohere[] + +[source,console] +-------------------------------------------------- +PUT _ingest/pipeline/cohere_embeddings +{ + "processors": [ + { + "inference": { + "model_id": "cohere_embeddings", <1> + "input_output": { <2> + "input_field": "content", + "output_field": "content_embedding" + } + } + } + ] +} +-------------------------------------------------- +<1> The name of the inference configuration you created by using the +<>. +<2> Configuration object that defines the `input_field` for the {infer} process +and the `output_field` that will contain the {infer} results. + +// end::cohere[] + + +// tag::openai[] + +[source,console] +-------------------------------------------------- +PUT _ingest/pipeline/openai_embeddings +{ + "processors": [ + { + "inference": { + "model_id": "openai_embeddings", <1> + "input_output": { <2> + "input_field": "content", + "output_field": "content_embedding" + } + } + } + ] +} +-------------------------------------------------- +<1> The name of the inference configuration you created by using the +<>. +<2> Configuration object that defines the `input_field` for the {infer} process +and the `output_field` that will contain the {infer} results. 
+ +// end::openai[] \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc new file mode 100644 index 0000000000000..336c8052c282f --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc @@ -0,0 +1,39 @@ +++++ +
    +
    + + +
    +
    +++++ + +include::infer-api-mapping.asciidoc[tag=cohere] + +++++ +
    + +
    +++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc new file mode 100644 index 0000000000000..4b70a1b84f45f --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc @@ -0,0 +1,71 @@ +// tag::cohere[] + +[source,console] +-------------------------------------------------- +PUT cohere-embeddings +{ + "mappings": { + "properties": { + "content_embedding": { <1> + "type": "dense_vector", <2> + "dims": 384, <3> + "element_type": "float" + }, + "content": { <4> + "type": "text" <5> + } + } + } +} +-------------------------------------------------- +<1> The name of the field to contain the generated tokens. It must be refrenced +in the {infer} pipeline configuration in the next step. +<2> The field to contain the tokens is a `dense_vector` field. +<3> The output dimensions of the model. Find this value in the +https://docs.cohere.com/reference/embed[Cohere documentation] of the model you +use. +<4> The name of the field from which to create the dense vector representation. +In this example, the name of the field is `content`. It must be referenced in +the {infer} pipeline configuration in the next step. +<5> The field type which is text in this example. + +// end::cohere[] + + +// tag::openai[] + +[source,console] +-------------------------------------------------- +PUT openai-embeddings +{ + "mappings": { + "properties": { + "content_embedding": { <1> + "type": "dense_vector", <2> + "dims": 1536, <3> + "element_type": "float", + "similarity": "dot_product" <4> + }, + "content": { <5> + "type": "text" <6> + } + } + } +} +-------------------------------------------------- +<1> The name of the field to contain the generated tokens. It must be refrenced +in the {infer} pipeline configuration in the next step. +<2> The field to contain the tokens is a `dense_vector` field. 
+<3> The output dimensions of the model. Find this value in the +https://platform.openai.com/docs/guides/embeddings/embedding-models[OpenAI documentation] +of the model you use. +<4> The faster` dot_product` function can be used to calculate similarity +because OpenAI embeddings are normalised to unit length. You can check the +https://platform.openai.com/docs/guides/embeddings/which-distance-function-should-i-use[OpenAI docs] +about which similarity function to use. +<5> The name of the field from which to create the dense vector representation. +In this example, the name of the field is `content`. It must be referenced in +the {infer} pipeline configuration in the next step. +<6> The field type which is text in this example. + +// end::openai[] \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc new file mode 100644 index 0000000000000..a73e4d7d76fc1 --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc @@ -0,0 +1,39 @@ +++++ +
    +
    + + +
    +
    +++++ + +include::infer-api-reindex.asciidoc[tag=cohere] + +++++ +
    + +
    +++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc new file mode 100644 index 0000000000000..92e781f8b5a8a --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc @@ -0,0 +1,55 @@ +// tag::cohere[] + +[source,console] +---- +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "test-data", + "size": 50 <1> + }, + "dest": { + "index": "cohere-embeddings", + "pipeline": "cohere_embeddings" + } +} +---- +// TEST[skip:TBD] +<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller +number makes the update of the reindexing process quicker which enables you to +follow the progress closely and detect errors early. + +NOTE: The +https://dashboard.cohere.com/billing[rate limit of your Cohere account] +may affect the throughput of the reindexing process. + +// end::cohere[] + + +// tag::openai[] + +[source,console] +---- +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "test-data", + "size": 50 <1> + }, + "dest": { + "index": "openai-embeddings", + "pipeline": "openai_embeddings" + } +} +---- +// TEST[skip:TBD] +<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller +number makes the update of the reindexing process quicker which enables you to +follow the progress closely and detect errors early. + +NOTE: The +https://platform.openai.com/account/limits[rate limit of your OpenAI account] +may affect the throughput of the reindexing process. If this happens, change +`size` to `3` or a similar value in magnitude. 
+ +// end::openai[] \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc new file mode 100644 index 0000000000000..d1b981158c11b --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc @@ -0,0 +1,39 @@ +++++ +
    +
    + + +
    +
    +++++ + +include::infer-api-requirements.asciidoc[tag=cohere] + +++++ +
    + +
    +++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc new file mode 100644 index 0000000000000..f0bed750b69c9 --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc @@ -0,0 +1,14 @@ +// tag::cohere[] + +A https://cohere.com/[Cohere account] is required to use the {infer} API with +the Cohere service. + +// end::cohere[] + + +// tag::openai[] + +An https://openai.com/[OpenAI account] is required to use the {infer} API with +the OpenAI service. + +// end::openai[] \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc new file mode 100644 index 0000000000000..4433f2da067f1 --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc @@ -0,0 +1,39 @@ +++++ +
    +
    + + +
    +
    +++++ + +include::infer-api-search.asciidoc[tag=cohere] + +++++ +
    + +
    +++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc new file mode 100644 index 0000000000000..0c71ab7cecbce --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc @@ -0,0 +1,139 @@ +// tag::cohere[] + +[source,console] +-------------------------------------------------- +GET cohere-embeddings/_search +{ + "knn": { + "field": "content_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "cohere_embeddings", + "model_text": "Calculate fuel cost" + } + }, + "k": 10, + "num_candidates": 100 + }, + "_source": [ + "id", + "content" + ] +} +-------------------------------------------------- +// TEST[skip:TBD] + +As a result, you receive the top 10 documents that are closest in meaning to the +query from the `cohere-embeddings` index sorted by their proximity to the query: + +[source,consol-result] +-------------------------------------------------- +"hits": [ + { + "_index": "cohere-embeddings", + "_id": "-eFWCY4BECzWLnMZuI78", + "_score": 0.737484, + "_source": { + "id": 1690948, + "content": "Oxygen is supplied to the muscles via red blood cells. Red blood cells carry hemoglobin which oxygen bonds with as the hemoglobin rich blood cells pass through the blood vessels of the lungs.The now oxygen rich blood cells carry that oxygen to the cells that are demanding it, in this case skeletal muscle cells.ther ways in which muscles are supplied with oxygen include: 1 Blood flow from the heart is increased. 2 Blood flow to your muscles in increased. 3 Blood flow from nonessential organs is transported to working muscles." + } + }, + { + "_index": "cohere-embeddings", + "_id": "HuFWCY4BECzWLnMZuI_8", + "_score": 0.7176013, + "_source": { + "id": 1692482, + "content": "The thoracic cavity is separated from the abdominal cavity by the diaphragm. This is a broad flat muscle. 
(muscular) diaphragm The diaphragm is a muscle that separat…e the thoracic from the abdominal cavity. The pelvis is the lowest part of the abdominal cavity and it has no physical separation from it Diaphragm." + } + }, + { + "_index": "cohere-embeddings", + "_id": "IOFWCY4BECzWLnMZuI_8", + "_score": 0.7154432, + "_source": { + "id": 1692489, + "content": "Muscular Wall Separating the Abdominal and Thoracic Cavities; Thoracic Cavity of a Fetal Pig; In Mammals the Diaphragm Separates the Abdominal Cavity from the" + } + }, + { + "_index": "cohere-embeddings", + "_id": "C-FWCY4BECzWLnMZuI_8", + "_score": 0.695313, + "_source": { + "id": 1691493, + "content": "Burning, aching, tenderness and stiffness are just some descriptors of the discomfort you may feel in the muscles you exercised one to two days ago.For the most part, these sensations you experience after exercise are collectively known as delayed onset muscle soreness.urning, aching, tenderness and stiffness are just some descriptors of the discomfort you may feel in the muscles you exercised one to two days ago." + } + }, + (...) 
+ ] +-------------------------------------------------- +// NOTCONSOLE + +// end::cohere[] + + +// tag::openai[] + +[source,console] +-------------------------------------------------- +GET openai-embeddings/_search +{ + "knn": { + "field": "content_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "openai_embeddings", + "model_text": "Calculate fuel cost" + } + }, + "k": 10, + "num_candidates": 100 + }, + "_source": [ + "id", + "content" + ] +} +-------------------------------------------------- +// TEST[skip:TBD] + +As a result, you receive the top 10 documents that are closest in meaning to the +query from the `openai-embeddings` index sorted by their proximity to the query: + +[source,consol-result] +-------------------------------------------------- +"hits": [ + { + "_index": "openai-embeddings", + "_id": "DDd5OowBHxQKHyc3TDSC", + "_score": 0.83704096, + "_source": { + "id": 862114, + "body": "How to calculate fuel cost for a road trip. By Tara Baukus Mello • Bankrate.com. Dear Driving for Dollars, My family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost.It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes.y family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost. It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes." + } + }, + { + "_index": "openai-embeddings", + "_id": "ajd5OowBHxQKHyc3TDSC", + "_score": 0.8345704, + "_source": { + "id": 820622, + "body": "Home Heating Calculator. Typically, approximately 50% of the energy consumed in a home annually is for space heating. 
When deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important.This calculator can help you estimate the cost of fuel for different heating appliances.hen deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important. This calculator can help you estimate the cost of fuel for different heating appliances." + } + }, + { + "_index": "openai-embeddings", + "_id": "Djd5OowBHxQKHyc3TDSC", + "_score": 0.8327426, + "_source": { + "id": 8202683, + "body": "Fuel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel.If you are paying $4 per gallon, the trip would cost you $200.Most boats have much larger gas tanks than cars.uel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel." + } + }, + (...) + ] +-------------------------------------------------- +// NOTCONSOLE + +// end::openai[] \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc new file mode 100644 index 0000000000000..bc54bf6b14ddf --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc @@ -0,0 +1,39 @@ +++++ +
    +
    + + +
    +
    +++++ + +include::infer-api-task.asciidoc[tag=cohere] + +++++ +
    + +
    +++++ \ No newline at end of file diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc new file mode 100644 index 0000000000000..3395fea9cc053 --- /dev/null +++ b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc @@ -0,0 +1,56 @@ +// tag::cohere[] + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/cohere_embeddings <1> +{ + "service": "cohere", + "service_settings": { + "api_key": "", <2> + "model_id": "embed-english-light-v3.0", <3> + "embedding_type": "int8" + }, + "task_settings": { + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The task type is `text_embedding` in the path. +<2> The API key of your Cohere account. You can find your API keys in your +Cohere dashboard under the +https://dashboard.cohere.com/api-keys[API keys section]. You need to provide +your API key only once. The <> does not return your API +key. +<3> The name of the embedding model to use. You can find the list of Cohere +embedding models https://docs.cohere.com/reference/embed[here]. + +// end::cohere[] + + +// tag::openai[] + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/openai_embeddings <1> +{ + "service": "openai", + "service_settings": { + "api_key": "", <2> + "model_id": "text-embedding-ada-002" <3> + }, + "task_settings": { + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The task type is `text_embedding` in the path. +<2> The API key of your OpenAI account. You can find your OpenAI API keys in +your OpenAI account under the +https://platform.openai.com/api-keys[API keys section]. You need to provide +your API key only once. The <> does not return your API +key. +<3> The name of the embedding model to use. 
You can find the list of OpenAI +embedding models +https://platform.openai.com/docs/guides/embeddings/embedding-models[here]. + +// end::openai[] \ No newline at end of file From 89786f59c885d289be144903b35b160b77e8689e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 4 Mar 2024 11:42:36 -0500 Subject: [PATCH 103/107] Test mute for #105918 (#105919) mute for: https://github.com/elastic/elasticsearch/issues/105918 --- .../xpack/esql/querydsl/query/SingleValueQueryTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index f773904ed8973..1d62bc0b6eaaa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -137,6 +137,7 @@ public void testNotMatchNone() throws IOException { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105918") public void testNotMatchSome() throws IOException { int max = between(1, 100); testCase( From 6f87bd379f653ab34d912d68d123eca7bfae4060 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 4 Mar 2024 12:23:48 -0500 Subject: [PATCH 104/107] Skipping tests that are failing because of timezone field (#105924) Muting tests failing related to https://github.com/elastic/elasticsearch/issues/105840 --- .../elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java | 3 +++ .../xpack/sql/qa/jdbc/JdbcErrorsTestCase.java | 2 ++ .../xpack/sql/qa/jdbc/PreparedStatementTestCase.java | 2 ++ .../elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java | 7 ++++++- .../xpack/sql/qa/security/JdbcSecurityIT.java | 1 + 5 files changed, 14 insertions(+), 1 deletion(-) diff --git 
a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java index b8af2ae44623a..ec20cc3c64104 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java @@ -89,6 +89,7 @@ public void testScroll() throws SQLException { * Test for {@code SELECT} that is implemented as a scroll query. * In this test we don't retrieve all records and rely on close() to clean the cursor */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testIncompleteScroll() throws SQLException { try (Connection c = esJdbc(); Statement s = c.createStatement()) { s.setFetchSize(4); @@ -152,6 +153,7 @@ public void testScrollWithDatetimeAndTimezoneParam() throws IOException, SQLExce /** * Test for {@code SELECT} that is implemented as an aggregation. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testAggregation() throws SQLException { try (Connection c = esJdbc(); Statement s = c.createStatement()) { s.setFetchSize(4); @@ -170,6 +172,7 @@ public void testAggregation() throws SQLException { /** * Test for nested documents. 
*/ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testNestedDocuments() throws SQLException { try (Connection c = esJdbc(); Statement s = c.createStatement()) { s.setFetchSize(5); diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcErrorsTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcErrorsTestCase.java index e962f35be2a94..bd49ef0f6b39d 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcErrorsTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcErrorsTestCase.java @@ -78,6 +78,7 @@ public void testSelectProjectScoreInAggContext() throws IOException, SQLExceptio } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testSelectOrderByScoreInAggContext() throws IOException, SQLException { index("test", body -> body.field("foo", 1)); try (Connection c = esJdbc()) { @@ -111,6 +112,7 @@ public void testSelectScoreSubField() throws IOException, SQLException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testHardLimitForSortOnAggregate() throws IOException, SQLException { index("test", body -> body.field("a", 1).field("b", 2)); try (Connection c = esJdbc()) { diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/PreparedStatementTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/PreparedStatementTestCase.java index b2b983803260c..6575ff780ccb8 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/PreparedStatementTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/PreparedStatementTestCase.java @@ -301,6 +301,7 @@ public void testWildcardField() throws IOException, SQLException { } } + @AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/105840") public void testConstantKeywordField() throws IOException, SQLException { String mapping = """ "properties":{"id":{"type":"integer"},"text":{"type":"constant_keyword"}}"""; @@ -368,6 +369,7 @@ public void testTooMayParameters() throws IOException, SQLException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testStringEscaping() throws SQLException { try (Connection connection = esJdbc()) { try (PreparedStatement statement = connection.prepareStatement("SELECT ?, ?, ?, ?")) { diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java index d99fb9674818c..d8534b963c2d7 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java @@ -73,7 +73,6 @@ import static org.elasticsearch.common.time.DateUtils.toMilliSeconds; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.JDBC_DRIVER_VERSION; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.JDBC_TIMEZONE; -import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.UNSIGNED_LONG_MAX; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.UNSIGNED_LONG_TYPE_NAME; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.asDate; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.asTime; @@ -846,6 +845,7 @@ public void testGettingValidNumbersWithCastingFromUnsignedLong() throws IOExcept } // Double values testing + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testGettingValidDoubleWithoutCasting() throws IOException, SQLException { List doubleTestValues = 
createTestDataForNumericValueTests(ESTestCase::randomDouble); double random1 = doubleTestValues.get(0); @@ -1158,6 +1158,7 @@ public void testGettingValidBigDecimalFromFloatWithoutCasting() throws IOExcepti ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testGettingValidBigDecimalFromDoubleWithoutCasting() throws IOException, SQLException { List doubleTestValues = createTestDataForNumericValueTests(ESTestCase::randomDouble); doWithQuery( @@ -1405,6 +1406,7 @@ public void testGettingDateWithoutCalendarWithNanos() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testGettingDateWithCalendar() throws Exception { long randomLongDate = randomMillisUpToYear9999(); setupDataForDateTimeTests(randomLongDate); @@ -1434,6 +1436,7 @@ public void testGettingDateWithCalendar() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testGettingDateWithCalendarWithNanos() throws Exception { assumeTrue( "Driver version [" + JDBC_DRIVER_VERSION + "] doesn't support DATETIME with nanosecond resolution]", @@ -1597,6 +1600,7 @@ public void testGettingTimestampWithoutCalendar() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testGettingTimestampWithoutCalendarWithNanos() throws Exception { assumeTrue( "Driver version [" + JDBC_DRIVER_VERSION + "] doesn't support DATETIME with nanosecond resolution]", @@ -1929,6 +1933,7 @@ public void testGetTimeType() throws IOException, SQLException { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testValidGetObjectCalls() throws IOException, SQLException { createIndexWithMapping("test"); updateMappingForNumericValuesTests("test"); diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java 
b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java index 0e0c2bc8d78b4..6a46346f627ac 100644 --- a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/JdbcSecurityIT.java @@ -345,6 +345,7 @@ public void testMetadataGetColumnsSingleFieldExcepted() throws Exception { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105840") public void testMetadataGetColumnsDocumentExcluded() throws Exception { createUser("no_3s", "read_test_without_c_3"); From 6ab69e5bc947dd708d4ff9c2c3a1c141c87ec769 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 4 Mar 2024 13:22:14 -0500 Subject: [PATCH 105/107] ESQL: Don't test field extraction in 8.11 (#105909) We changed field extraction in ES|QL in 8.12 quite a bit so our tests would have to be super complex to test a cluster of mixed versions between 8.11 and `main`. So let's just skip it. 
Closes #105837 --- .../xpack/esql/qa/mixed/FieldExtractorIT.java | 2 -- .../xpack/esql/qa/rest/FieldExtractorTestCase.java | 9 +++++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/FieldExtractorIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/FieldExtractorIT.java index bdb10ea65dc1b..8c1e47c29670a 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/FieldExtractorIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/FieldExtractorIT.java @@ -9,13 +9,11 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.esql.qa.rest.FieldExtractorTestCase; import org.junit.ClassRule; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105837") @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class FieldExtractorIT extends FieldExtractorTestCase { @ClassRule diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index 3f8caa3bdf5d4..39c21651a7e02 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -27,6 +27,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matcher; +import 
org.junit.Before; import java.io.IOException; import java.math.BigDecimal; @@ -57,6 +58,14 @@ public abstract class FieldExtractorTestCase extends ESRestTestCase { private static final Logger logger = LogManager.getLogger(FieldExtractorTestCase.class); + @Before + public void notOld() { + assumeTrue( + "support changed pretty radically in 8.12 so we don't test against 8.11", + getCachedNodesVersions().stream().allMatch(v -> Version.fromString(v).onOrAfter(Version.V_8_12_0)) + ); + } + public void testTextField() throws IOException { textTest().test(randomAlphaOfLength(20)); } From 93fd12d6dbddd11f2c8ff0ed76eeb8c9ce5e32d0 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Mon, 4 Mar 2024 19:34:40 +0100 Subject: [PATCH 106/107] Update health YAML REST test skip version (#105927) The health report API changed names in https://github.com/elastic/elasticsearch/pull/92879, which causes this YAML REST test to fail in versions < 8.7.0. Closes #105923 --- .../resources/rest-api-spec/test/health/10_basic.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml index 1dc35c165b4e0..a000a9eac16ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml @@ -1,8 +1,8 @@ --- "cluster health basic test": - skip: - version: "- 8.3.99" - reason: "health was only added in 8.2.0, and master_is_stable in 8.4.0" + version: "- 8.6.99" + reason: "health was added in 8.2.0, master_is_stable in 8.4.0, and REST API updated in 8.7" - do: health_report: { } From ca10472541159f19bfd5e0af3c253d87eb4228cc Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 5 Mar 2024 07:14:35 +0100 Subject: [PATCH 107/107] Make use of 
ActionListener#delegateFailureAndWrap in more spots in ML codebase (#105882) Found a bunch more spots where this shortcut helps save both memory and brainpower for thinking through potential leaks. => made use of it and sometimes also inlined a couple local variables for readability. --- .../xpack/ml/datafeed/DatafeedManager.java | 57 ++++++++------- .../extractor/DataExtractorFactory.java | 7 +- .../persistence/DatafeedConfigProvider.java | 28 ++++---- .../dataframe/DataFrameAnalyticsManager.java | 69 +++++++++---------- .../xpack/ml/dataframe/DestinationIndex.java | 8 ++- .../DataFrameDataExtractorFactory.java | 30 ++++---- .../ExtractedFieldsDetectorFactory.java | 7 +- .../DataFrameAnalyticsConfigProvider.java | 60 ++++++++-------- .../DataFrameAnalyticsDeleter.java | 28 ++++---- .../steps/AbstractDataFrameAnalyticsStep.java | 6 +- .../ml/dataframe/steps/AnalysisStep.java | 11 ++- .../xpack/ml/dataframe/steps/FinalStep.java | 17 ++--- .../ml/dataframe/steps/InferenceStep.java | 62 ++++++++--------- .../xpack/ml/job/JobManager.java | 62 ++++++++--------- .../AbstractExpiredJobDataRemover.java | 8 +-- .../job/retention/EmptyStateIndexRemover.java | 42 ++++++----- .../ExpiredModelSnapshotsRemover.java | 6 +- .../job/retention/ExpiredResultsRemover.java | 10 +-- .../task/OpenJobPersistentTasksExecutor.java | 8 +-- .../EmptyStateIndexRemoverTests.java | 3 + .../ExpiredAnnotationsRemoverTests.java | 1 + .../retention/ExpiredResultsRemoverTests.java | 1 + 22 files changed, 261 insertions(+), 270 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java index ede57764a0813..d44d2181f0ce8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java @@ -121,9 +121,8 @@ public void putDatafeed( 
final RoleDescriptor.IndicesPrivileges.Builder indicesPrivilegesBuilder = RoleDescriptor.IndicesPrivileges.builder() .indices(indices); - ActionListener privResponseListener = ActionListener.wrap( - r -> handlePrivsResponse(username, request, r, state, threadPool, listener), - listener::onFailure + ActionListener privResponseListener = listener.delegateFailureAndWrap( + (l, r) -> handlePrivsResponse(username, request, r, state, threadPool, l) ); ActionListener getRollupIndexCapsActionHandler = ActionListener.wrap(response -> { @@ -173,15 +172,14 @@ public void getDatafeeds( request.getDatafeedId(), request.allowNoMatch(), parentTaskId, - ActionListener.wrap( - datafeedBuilders -> listener.onResponse( + listener.delegateFailureAndWrap( + (l, datafeedBuilders) -> l.onResponse( new QueryPage<>( datafeedBuilders.stream().map(DatafeedConfig.Builder::build).collect(Collectors.toList()), datafeedBuilders.size(), DatafeedConfig.RESULTS_FIELD ) - ), - listener::onFailure + ) ) ); } @@ -222,10 +220,7 @@ public void updateDatafeed( request.getUpdate(), headers, jobConfigProvider::validateDatafeedJob, - ActionListener.wrap( - updatedConfig -> listener.onResponse(new PutDatafeedAction.Response(updatedConfig)), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, updatedConfig) -> l.onResponse(new PutDatafeedAction.Response(updatedConfig))) ); }); @@ -254,19 +249,18 @@ public void deleteDatafeed(DeleteDatafeedAction.Request request, ClusterState st String datafeedId = request.getDatafeedId(); - datafeedConfigProvider.getDatafeedConfig(datafeedId, null, ActionListener.wrap(datafeedConfigBuilder -> { + datafeedConfigProvider.getDatafeedConfig(datafeedId, null, listener.delegateFailureAndWrap((delegate, datafeedConfigBuilder) -> { String jobId = datafeedConfigBuilder.build().getJobId(); JobDataDeleter jobDataDeleter = new JobDataDeleter(client, jobId); jobDataDeleter.deleteDatafeedTimingStats( - ActionListener.wrap( - unused1 -> 
datafeedConfigProvider.deleteDatafeedConfig( + delegate.delegateFailureAndWrap( + (l, unused1) -> datafeedConfigProvider.deleteDatafeedConfig( datafeedId, - ActionListener.wrap(unused2 -> listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) - ), - listener::onFailure + l.delegateFailureAndWrap((ll, unused2) -> ll.onResponse(AcknowledgedResponse.TRUE)) + ) ) ); - }, listener::onFailure)); + })); } @@ -316,7 +310,7 @@ private void putDatafeed( CheckedConsumer mappingsUpdated = ok -> datafeedConfigProvider.putDatafeedConfig( request.getDatafeed(), headers, - ActionListener.wrap(response -> listener.onResponse(new PutDatafeedAction.Response(response.v1())), listener::onFailure) + listener.delegateFailureAndWrap((l, response) -> l.onResponse(new PutDatafeedAction.Response(response.v1()))) ); CheckedConsumer validationOk = ok -> { @@ -345,16 +339,19 @@ private void putDatafeed( } private void checkJobDoesNotHaveADatafeed(String jobId, ActionListener listener) { - datafeedConfigProvider.findDatafeedIdsForJobIds(Collections.singletonList(jobId), ActionListener.wrap(datafeedIds -> { - if (datafeedIds.isEmpty()) { - listener.onResponse(Boolean.TRUE); - } else { - listener.onFailure( - ExceptionsHelper.conflictStatusException( - "A datafeed [" + datafeedIds.iterator().next() + "] already exists for job [" + jobId + "]" - ) - ); - } - }, listener::onFailure)); + datafeedConfigProvider.findDatafeedIdsForJobIds( + Collections.singletonList(jobId), + listener.delegateFailureAndWrap((delegate, datafeedIds) -> { + if (datafeedIds.isEmpty()) { + delegate.onResponse(Boolean.TRUE); + } else { + delegate.onFailure( + ExceptionsHelper.conflictStatusException( + "A datafeed [" + datafeedIds.iterator().next() + "] already exists for job [" + jobId + "]" + ) + ); + } + }) + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java index be2c8dd871a9b..bcdf5e83cc5ca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java @@ -59,13 +59,12 @@ static void create( ) { final boolean hasAggs = datafeed.hasAggregations(); final boolean isComposite = hasAggs && datafeed.hasCompositeAgg(xContentRegistry); - ActionListener factoryHandler = ActionListener.wrap( - factory -> listener.onResponse( + ActionListener factoryHandler = listener.delegateFailureAndWrap( + (l, factory) -> l.onResponse( datafeed.getChunkingConfig().isEnabled() ? new ChunkedDataExtractorFactory(datafeed, job, xContentRegistry, factory) : factory - ), - listener::onFailure + ) ); ActionListener getRollupIndexCapsActionHandler = ActionListener.wrap(response -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index e226056217351..fbabc9903c4cc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -223,7 +223,7 @@ public void findDatafeedIdsForJobIds(Collection jobIds, ActionListenerwrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { Set datafeedIds = new HashSet<>(); // There cannot be more than one datafeed per job assert response.getHits().getTotalHits().value <= jobIds.size(); @@ -233,8 +233,8 @@ public void findDatafeedIdsForJobIds(Collection jobIds, ActionListenerwrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { Map datafeedsByJobId = new 
HashMap<>(); // There cannot be more than one datafeed per job assert response.getHits().getTotalHits().value <= jobIds.size(); @@ -265,8 +265,8 @@ public void findDatafeedsByJobIds( DatafeedConfig.Builder builder = parseLenientlyFromSource(hit.getSourceRef()); datafeedsByJobId.put(builder.getJobId(), builder); } - listener.onResponse(datafeedsByJobId); - }, listener::onFailure), + delegate.onResponse(datafeedsByJobId); + }), client::search ); } @@ -440,7 +440,7 @@ public void expandDatafeedIds( client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, - ActionListener.wrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { SortedSet datafeedIds = new TreeSet<>(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { @@ -453,12 +453,12 @@ public void expandDatafeedIds( requiredMatches.filterMatchedIds(datafeedIds); if (requiredMatches.hasUnmatchedIds()) { // some required datafeeds were not found - listener.onFailure(ExceptionsHelper.missingDatafeedException(requiredMatches.unmatchedIdsString())); + delegate.onFailure(ExceptionsHelper.missingDatafeedException(requiredMatches.unmatchedIdsString())); return; } - listener.onResponse(datafeedIds); - }, listener::onFailure), + delegate.onResponse(datafeedIds); + }), client::search ); @@ -502,7 +502,7 @@ public void expandDatafeedConfigs( client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, - ActionListener.wrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { List datafeeds = new ArrayList<>(); Set datafeedIds = new HashSet<>(); SearchHit[] hits = response.getHits().getHits(); @@ -521,12 +521,12 @@ public void expandDatafeedConfigs( requiredMatches.filterMatchedIds(datafeedIds); if (requiredMatches.hasUnmatchedIds()) { // some required datafeeds were not found - listener.onFailure(ExceptionsHelper.missingDatafeedException(requiredMatches.unmatchedIdsString())); + 
delegate.onFailure(ExceptionsHelper.missingDatafeedException(requiredMatches.unmatchedIdsString())); return; } - listener.onResponse(datafeeds); - }, listener::onFailure), + delegate.onResponse(datafeeds); + }), client::search ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index 223154737df3f..d370e8af52549 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -33,7 +33,6 @@ import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.dataframe.extractor.ExtractedFieldsDetector; import org.elasticsearch.xpack.ml.dataframe.extractor.ExtractedFieldsDetectorFactory; import org.elasticsearch.xpack.ml.dataframe.inference.InferenceRunner; import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; @@ -171,9 +170,8 @@ public void execute(DataFrameAnalyticsTask task, ClusterState clusterState, Time }, task::setFailed); // Retrieve configuration - ActionListener statsIndexListener = ActionListener.wrap( - aBoolean -> configProvider.get(task.getParams().getId(), configListener), - configListener::onFailure + ActionListener statsIndexListener = configListener.delegateFailureAndWrap( + (l, aBoolean) -> configProvider.get(task.getParams().getId(), l) ); // Make sure the stats index and alias exist @@ -203,25 +201,22 @@ private void createStatsIndexAndUpdateMappingsIfNecessary( TimeValue masterNodeTimeout, ActionListener listener ) { - ActionListener createIndexListener = ActionListener.wrap( - aBoolean -> 
ElasticsearchMappings.addDocMappingIfMissing( - MlStatsIndex.writeAlias(), - MlStatsIndex::wrappedMapping, - clientToUse, - clusterState, - masterNodeTimeout, - listener, - MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION - ), - listener::onFailure - ); - MlStatsIndex.createStatsIndexAndAliasIfNecessary( clientToUse, clusterState, expressionResolver, masterNodeTimeout, - createIndexListener + listener.delegateFailureAndWrap( + (l, aBoolean) -> ElasticsearchMappings.addDocMappingIfMissing( + MlStatsIndex.writeAlias(), + MlStatsIndex::wrappedMapping, + clientToUse, + clusterState, + masterNodeTimeout, + l, + MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION + ) + ) ); } @@ -306,25 +301,25 @@ private void executeJobInMiddleOfReindexing(DataFrameAnalyticsTask task, DataFra private void buildInferenceStep(DataFrameAnalyticsTask task, DataFrameAnalyticsConfig config, ActionListener listener) { ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, task.getParentTaskId()); - - ActionListener extractedFieldsDetectorListener = ActionListener.wrap(extractedFieldsDetector -> { - ExtractedFields extractedFields = extractedFieldsDetector.detect().v1(); - InferenceRunner inferenceRunner = new InferenceRunner( - settings, - parentTaskClient, - modelLoadingService, - resultsPersisterService, - task.getParentTaskId(), - config, - extractedFields, - task.getStatsHolder().getProgressTracker(), - task.getStatsHolder().getDataCountsTracker() - ); - InferenceStep inferenceStep = new InferenceStep(client, task, auditor, config, threadPool, inferenceRunner); - listener.onResponse(inferenceStep); - }, listener::onFailure); - - new ExtractedFieldsDetectorFactory(parentTaskClient).createFromDest(config, extractedFieldsDetectorListener); + new ExtractedFieldsDetectorFactory(parentTaskClient).createFromDest( + config, + listener.delegateFailureAndWrap((delegate, extractedFieldsDetector) -> { + ExtractedFields extractedFields = extractedFieldsDetector.detect().v1(); + 
InferenceRunner inferenceRunner = new InferenceRunner( + settings, + parentTaskClient, + modelLoadingService, + resultsPersisterService, + task.getParentTaskId(), + config, + extractedFields, + task.getStatsHolder().getProgressTracker(), + task.getStatsHolder().getDataCountsTracker() + ); + InferenceStep inferenceStep = new InferenceStep(client, task, auditor, config, threadPool, inferenceRunner); + delegate.onResponse(inferenceStep); + }) + ); } public boolean isNodeShuttingDown() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java index 81de8add4ae2e..8623f456b2035 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java @@ -134,9 +134,11 @@ private static void prepareCreateIndexRequest( AtomicReference settingsHolder = new AtomicReference<>(); AtomicReference mappingsHolder = new AtomicReference<>(); - ActionListener fieldCapabilitiesListener = ActionListener.wrap(fieldCapabilitiesResponse -> { - listener.onResponse(createIndexRequest(clock, config, settingsHolder.get(), mappingsHolder.get(), fieldCapabilitiesResponse)); - }, listener::onFailure); + ActionListener fieldCapabilitiesListener = listener.delegateFailureAndWrap( + (l, fieldCapabilitiesResponse) -> l.onResponse( + createIndexRequest(clock, config, settingsHolder.get(), mappingsHolder.get(), fieldCapabilitiesResponse) + ) + ); ActionListener mappingsListener = ActionListener.wrap(mappings -> { mappingsHolder.set(mappings); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java index b9d7e31a2cf73..09c3ae15c90a3 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorFactory.java @@ -147,22 +147,22 @@ public static void createForDestinationIndex( ActionListener listener ) { ExtractedFieldsDetectorFactory extractedFieldsDetectorFactory = new ExtractedFieldsDetectorFactory(client); - extractedFieldsDetectorFactory.createFromDest(config, ActionListener.wrap(extractedFieldsDetector -> { + extractedFieldsDetectorFactory.createFromDest(config, listener.delegateFailureAndWrap((delegate, extractedFieldsDetector) -> { ExtractedFields extractedFields = extractedFieldsDetector.detect().v1(); - - DataFrameDataExtractorFactory extractorFactory = new DataFrameDataExtractorFactory( - client, - config.getId(), - Collections.singletonList(config.getDest().getIndex()), - config.getSource().getParsedQuery(), - extractedFields, - config.getAnalysis().getRequiredFields(), - config.getHeaders(), - config.getAnalysis().supportsMissingValues(), - createTrainTestSplitterFactory(client, config, extractedFields), - Collections.emptyMap() + delegate.onResponse( + new DataFrameDataExtractorFactory( + client, + config.getId(), + Collections.singletonList(config.getDest().getIndex()), + config.getSource().getParsedQuery(), + extractedFields, + config.getAnalysis().getRequiredFields(), + config.getHeaders(), + config.getAnalysis().supportsMissingValues(), + createTrainTestSplitterFactory(client, config, extractedFields), + Collections.emptyMap() + ) ); - listener.onResponse(extractorFactory); - }, listener::onFailure)); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java index 49e25c95713ef..73f8e7bd520d4 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorFactory.java @@ -112,11 +112,6 @@ private void getCardinalitiesForFieldsWithConstraints( return; } - ActionListener searchListener = ActionListener.wrap( - searchResponse -> buildFieldCardinalitiesMap(config, searchResponse, listener), - listener::onFailure - ); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0) .query(config.getSource().getParsedQuery()) .runtimeMappings(config.getSource().getRuntimeMappings()); @@ -147,7 +142,7 @@ private void getCardinalitiesForFieldsWithConstraints( client, TransportSearchAction.TYPE, searchRequest, - searchListener + listener.delegateFailureAndWrap((l, searchResponse) -> buildFieldCardinalitiesMap(config, searchResponse, l)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java index 5469c6a7a7d87..8c7d490f37787 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java @@ -103,19 +103,17 @@ public void put( TimeValue timeout, ActionListener listener ) { - - ActionListener deleteLeftOverDocsListener = ActionListener.wrap( - r -> index(prepareConfigForIndex(config, headers), null, listener), - listener::onFailure - ); - - ActionListener existsListener = ActionListener.wrap(exists -> { + ActionListener existsListener = listener.delegateFailureAndWrap((l, exists) -> { if (exists) { - listener.onFailure(ExceptionsHelper.dataFrameAnalyticsAlreadyExists(config.getId())); + 
l.onFailure(ExceptionsHelper.dataFrameAnalyticsAlreadyExists(config.getId())); } else { - deleteLeftOverDocs(config, timeout, deleteLeftOverDocsListener); + deleteLeftOverDocs( + config, + timeout, + l.delegateFailureAndWrap((ll, r) -> index(prepareConfigForIndex(config, headers), null, ll)) + ); } - }, listener::onFailure); + }); exists(config.getId(), existsListener); } @@ -194,10 +192,10 @@ public void update( DataFrameAnalyticsConfig updatedConfig = updatedConfigBuilder.build(); // Index the update config - index(updatedConfig, getResponse, ActionListener.wrap(indexedConfig -> { + index(updatedConfig, getResponse, listener.delegateFailureAndWrap((l, indexedConfig) -> { auditor.info(id, Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_AUDIT_UPDATED, update.getUpdatedFields())); - listener.onResponse(indexedConfig); - }, listener::onFailure)); + l.onResponse(indexedConfig); + })); }, listener::onFailure)); } @@ -269,20 +267,26 @@ private void index( public void get(String id, ActionListener listener) { GetDataFrameAnalyticsAction.Request request = new GetDataFrameAnalyticsAction.Request(); request.setResourceId(id); - executeAsyncWithOrigin(client, ML_ORIGIN, GetDataFrameAnalyticsAction.INSTANCE, request, ActionListener.wrap(response -> { - List analytics = response.getResources().results(); - if (analytics.size() != 1) { - listener.onFailure( - ExceptionsHelper.badRequestException( - "Expected a single match for data frame analytics [{}] " + "but got [{}]", - id, - analytics.size() - ) - ); - } else { - listener.onResponse(analytics.get(0)); - } - }, listener::onFailure)); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + GetDataFrameAnalyticsAction.INSTANCE, + request, + listener.delegateFailureAndWrap((delegate, response) -> { + List analytics = response.getResources().results(); + if (analytics.size() != 1) { + delegate.onFailure( + ExceptionsHelper.badRequestException( + "Expected a single match for data frame analytics [{}] " + "but got [{}]", + id, + 
analytics.size() + ) + ); + } else { + delegate.onResponse(analytics.get(0)); + } + }) + ); } /** @@ -298,7 +302,7 @@ public void getMultiple(String ids, boolean allowNoMatch, ActionListener listener.onResponse(response.getResources().results()), listener::onFailure) + listener.delegateFailureAndWrap((l, response) -> l.onResponse(response.getResources().results())) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsDeleter.java index 843d9d74a1c7d..2a8b23728fbdb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsDeleter.java @@ -126,14 +126,13 @@ private void deleteConfig(String id, ActionListener listen } private void deleteState(DataFrameAnalyticsConfig config, TimeValue timeout, ActionListener listener) { - ActionListener deleteModelStateListener = ActionListener.wrap( - r -> executeDeleteByQuery( + ActionListener deleteModelStateListener = listener.delegateFailureAndWrap( + (l, r) -> executeDeleteByQuery( AnomalyDetectorsIndex.jobStateIndexPattern(), QueryBuilders.idsQuery().addIds(StoredProgress.documentId(config.getId())), timeout, - listener - ), - listener::onFailure + l + ) ); deleteModelState(config, timeout, 1, deleteModelStateListener); @@ -146,13 +145,18 @@ private void deleteModelState(DataFrameAnalyticsConfig config, TimeValue timeout } IdsQueryBuilder query = QueryBuilders.idsQuery().addIds(config.getAnalysis().getStateDocIdPrefix(config.getId()) + docNum); - executeDeleteByQuery(AnomalyDetectorsIndex.jobStateIndexPattern(), query, timeout, ActionListener.wrap(response -> { - if (response.getDeleted() > 0) { - deleteModelState(config, timeout, docNum + 1, listener); - return; - } - 
listener.onResponse(true); - }, listener::onFailure)); + executeDeleteByQuery( + AnomalyDetectorsIndex.jobStateIndexPattern(), + query, + timeout, + listener.delegateFailureAndWrap((l, response) -> { + if (response.getDeleted() > 0) { + deleteModelState(config, timeout, docNum + 1, l); + return; + } + l.onResponse(true); + }) + ); } private void deleteStats(String jobId, TimeValue timeout, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java index 0c693ff2d34f4..112d164601546 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java @@ -67,11 +67,11 @@ public final void execute(ActionListener listener) { listener.onResponse(new StepResponse(true)); return; } - doExecute(ActionListener.wrap(stepResponse -> { + doExecute(listener.delegateFailureAndWrap((l, stepResponse) -> { // We persist progress at the end of each step to ensure we do not have // to repeat the step in case the node goes down without getting a chance to persist progress. 
- task.persistProgress(() -> listener.onResponse(stepResponse)); - }, listener::onFailure)); + task.persistProgress(() -> l.onResponse(stepResponse)); + })); } protected abstract void doExecute(ActionListener listener); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java index 9e56387ed773e..ec914546c7de5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java @@ -58,17 +58,16 @@ protected void doExecute(ActionListener listener) { final ParentTaskAssigningClient parentTaskClient = parentTaskClient(); // Update state to ANALYZING and start process - ActionListener dataExtractorFactoryListener = ActionListener.wrap( - dataExtractorFactory -> processManager.runJob(task, config, dataExtractorFactory, listener), - listener::onFailure + ActionListener dataExtractorFactoryListener = listener.delegateFailureAndWrap( + (l, dataExtractorFactory) -> processManager.runJob(task, config, dataExtractorFactory, l) ); - ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { + ActionListener refreshListener = dataExtractorFactoryListener.delegateFailureAndWrap((l, refreshResponse) -> { // TODO This could fail with errors. In that case we get stuck with the copied index. // We could delete the index in case of failure or we could try building the factory before reindexing // to catch the error early on. 
- DataFrameDataExtractorFactory.createForDestinationIndex(parentTaskClient, config, dataExtractorFactoryListener); - }, dataExtractorFactoryListener::onFailure); + DataFrameDataExtractorFactory.createForDestinationIndex(parentTaskClient, config, l); + }); // First we need to refresh the dest index to ensure data is searchable in case the job // was stopped after reindexing was complete but before the index was refreshed. diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java index dbf1f3e7be3d9..258c66ad5cb0f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java @@ -59,18 +59,13 @@ public Name name() { @Override protected void doExecute(ActionListener listener) { - - ActionListener refreshListener = ActionListener.wrap( - refreshResponse -> listener.onResponse(new StepResponse(false)), - listener::onFailure - ); - - ActionListener dataCountsIndexedListener = ActionListener.wrap( - indexResponse -> refreshIndices(refreshListener), - listener::onFailure + indexDataCounts( + listener.delegateFailureAndWrap( + (l, indexResponse) -> refreshIndices( + l.delegateFailureAndWrap((ll, refreshResponse) -> ll.onResponse(new StepResponse(false))) + ) + ) ); - - indexDataCounts(dataCountsIndexedListener); } private void indexDataCounts(ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index ad005e6d9ae6c..37ad1a5cb8f56 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -13,7 
+13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; @@ -70,27 +69,21 @@ protected void doExecute(ActionListener listener) { return; } - ActionListener modelIdListener = ActionListener.wrap(modelId -> runInference(modelId, listener), listener::onFailure); - - ActionListener testDocsExistListener = ActionListener.wrap(testDocsExist -> { - if (testDocsExist) { - getModelId(modelIdListener); - } else { - // no need to run inference at all so let us skip - // loading the model in memory. - LOGGER.debug(() -> "[" + config.getId() + "] Inference step completed immediately as there are no test docs"); - task.getStatsHolder().getProgressTracker().updateInferenceProgress(100); - listener.onResponse(new StepResponse(isTaskStopping())); - return; - } - }, listener::onFailure); - - ActionListener refreshDestListener = ActionListener.wrap( - refreshResponse -> searchIfTestDocsExist(testDocsExistListener), - listener::onFailure + refreshDestAsync( + listener.delegateFailureAndWrap( + (delegate, refreshResponse) -> searchIfTestDocsExist(delegate.delegateFailureAndWrap((delegate2, testDocsExist) -> { + if (testDocsExist) { + getModelId(delegate2.delegateFailureAndWrap((l, modelId) -> runInference(modelId, l))); + } else { + // no need to run inference at all so let us skip + // loading the model in memory. 
+ LOGGER.debug(() -> "[" + config.getId() + "] Inference step completed immediately as there are no test docs"); + task.getStatsHolder().getProgressTracker().updateInferenceProgress(100); + delegate2.onResponse(new StepResponse(isTaskStopping())); + } + })) + ) ); - - refreshDestAsync(refreshDestListener); } private void runInference(String modelId, ActionListener listener) { @@ -124,10 +117,7 @@ private void searchIfTestDocsExist(ActionListener listener) { ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, - ActionListener.wrap( - searchResponse -> listener.onResponse(searchResponse.getHits().getTotalHits().value > 0), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value > 0)) ); } @@ -142,14 +132,20 @@ private void getModelId(ActionListener listener) { SearchRequest searchRequest = new SearchRequest(InferenceIndexConstants.INDEX_PATTERN); searchRequest.source(searchSourceBuilder); - executeAsyncWithOrigin(client, ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - SearchHit[] hits = searchResponse.getHits().getHits(); - if (hits.length == 0) { - listener.onFailure(new ResourceNotFoundException("No model could be found to perform inference")); - } else { - listener.onResponse(hits[0].getId()); - } - }, listener::onFailure)); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + TransportSearchAction.TYPE, + searchRequest, + listener.delegateFailureAndWrap((l, searchResponse) -> { + SearchHit[] hits = searchResponse.getHits().getHits(); + if (hits.length == 0) { + l.onFailure(new ResourceNotFoundException("No model could be found to perform inference")); + } else { + l.onResponse(hits[0].getId()); + } + }) + ); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index 7532ae4317830..9887152c6f311 100644 
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -142,14 +142,8 @@ public void jobExists(String jobId, @Nullable TaskId parentTaskId, ActionListene * a ResourceNotFoundException is returned */ public void getJob(String jobId, ActionListener jobListener) { - jobConfigProvider.getJob( - jobId, - null, - ActionListener.wrap( - r -> jobListener.onResponse(r.build()), // TODO JIndex we shouldn't be building the job here - jobListener::onFailure - ) - ); + // TODO JIndex we shouldn't be building the job here + jobConfigProvider.getJob(jobId, null, jobListener.delegateFailureAndWrap((l, r) -> l.onResponse(r.build()))); } /** @@ -183,15 +177,14 @@ public void expandJobs(String expression, boolean allowNoMatch, ActionListener jobsListener.onResponse( + jobsListener.delegateFailureAndWrap( + (l, jobBuilders) -> l.onResponse( new QueryPage<>( jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()), jobBuilders.size(), Job.RESULTS_FIELD ) - ), - jobsListener::onFailure + ) ) ); } @@ -253,10 +246,10 @@ public void putJob( @Override public void onResponse(Boolean mappingsUpdated) { - jobConfigProvider.putJob(job, ActionListener.wrap(response -> { + jobConfigProvider.putJob(job, actionListener.delegateFailureAndWrap((l, response) -> { auditor.info(job.getId(), Messages.getMessage(Messages.JOB_AUDIT_CREATED)); - actionListener.onResponse(new PutJobAction.Response(job)); - }, actionListener::onFailure)); + l.onResponse(new PutJobAction.Response(job)); + })); } @Override @@ -275,17 +268,16 @@ public void onFailure(Exception e) { } }; - ActionListener addDocMappingsListener = ActionListener.wrap( - indicesCreated -> ElasticsearchMappings.addDocMappingIfMissing( + ActionListener addDocMappingsListener = putJobListener.delegateFailureAndWrap( + (l, indicesCreated) -> ElasticsearchMappings.addDocMappingIfMissing( MlConfigIndex.indexName(), 
MlConfigIndex::mapping, client, state, request.masterNodeTimeout(), - putJobListener, + l, MlConfigIndex.CONFIG_INDEX_MAPPINGS_VERSION - ), - putJobListener::onFailure + ) ); ActionListener> checkForLeftOverDocs = ActionListener.wrap(matchedIds -> { @@ -634,14 +626,15 @@ public void updateProcessOnCalendarChanged(List calendarJobIds, ActionLi // calendarJobIds may be a group or job jobConfigProvider.expandGroupIds( calendarJobIds, - ActionListener.wrap(expandedIds -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { - // Merge the expanded group members with the request Ids. - // Ids that aren't jobs will be filtered by isJobOpen() - expandedIds.addAll(calendarJobIds); - - openJobIds.retainAll(expandedIds); - submitJobEventUpdate(openJobIds, updateListener); - }), updateListener::onFailure) + updateListener.delegateFailureAndWrap( + (delegate, expandedIds) -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + // Merge the expanded group members with the request Ids. 
+ // Ids that aren't jobs will be filtered by isJobOpen() + expandedIds.addAll(calendarJobIds); + openJobIds.retainAll(expandedIds); + submitJobEventUpdate(openJobIds, delegate); + }) + ) ); } @@ -678,12 +671,13 @@ public void revertSnapshot( jobResultsPersister.persistQuantiles( modelSnapshot.getQuantiles(), WriteRequest.RefreshPolicy.IMMEDIATE, - ActionListener.wrap(quantilesResponse -> { - // The quantiles can be large, and totally dominate the output - - // it's clearer to remove them as they are not necessary for the revert op - ModelSnapshot snapshotWithoutQuantiles = new ModelSnapshot.Builder(modelSnapshot).setQuantiles(null).build(); - actionListener.onResponse(new RevertModelSnapshotAction.Response(snapshotWithoutQuantiles)); - }, actionListener::onFailure) + // The quantiles can be large, and totally dominate the output - + // it's clearer to remove them as they are not necessary for the revert op + actionListener.delegateFailureAndWrap( + (l, quantilesResponse) -> l.onResponse( + new RevertModelSnapshotAction.Response(new ModelSnapshot.Builder(modelSnapshot).setQuantiles(null).build()) + ) + ) ); }; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java index aa82c7a261b96..bd1e47e3cb160 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java @@ -70,19 +70,19 @@ private void removeData( return; } - calcCutoffEpochMs(job.getId(), retentionDays, ActionListener.wrap(response -> { + calcCutoffEpochMs(job.getId(), retentionDays, listener.delegateFailureAndWrap((delegate, response) -> { if (response == null) { - removeData(jobIterator, requestsPerSecond, listener, isTimedOutSupplier); + removeData(jobIterator, 
requestsPerSecond, delegate, isTimedOutSupplier); } else { removeDataBefore( job, requestsPerSecond, response.latestTimeMs, response.cutoffEpochMs, - ActionListener.wrap(r -> removeData(jobIterator, requestsPerSecond, listener, isTimedOutSupplier), listener::onFailure) + delegate.delegateFailureAndWrap((l, r) -> removeData(jobIterator, requestsPerSecond, l, isTimedOutSupplier)) ); } - }, listener::onFailure)); + })); } abstract void calcCutoffEpochMs(String jobId, long retentionDays, ActionListener listener); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java index 0a5612f8e0ccc..1c8c100939dc7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemover.java @@ -42,20 +42,20 @@ public void remove(float requestsPerSec, ActionListener listener, Boole listener.onResponse(false); return; } - getEmptyStateIndices(ActionListener.wrap(emptyStateIndices -> { + getEmptyStateIndices(listener.delegateFailureAndWrap((delegate, emptyStateIndices) -> { if (emptyStateIndices.isEmpty()) { - listener.onResponse(true); + delegate.onResponse(true); return; } - getCurrentStateIndices(ActionListener.wrap(currentStateIndices -> { + getCurrentStateIndices(delegate.delegateFailureAndWrap((l, currentStateIndices) -> { Set stateIndicesToRemove = Sets.difference(emptyStateIndices, currentStateIndices); if (stateIndicesToRemove.isEmpty()) { - listener.onResponse(true); + l.onResponse(true); return; } - executeDeleteEmptyStateIndices(stateIndicesToRemove, listener); - }, listener::onFailure)); - }, listener::onFailure)); + executeDeleteEmptyStateIndices(stateIndicesToRemove, l); + })); + })); } catch (Exception e) { listener.onFailure(e); } @@ -64,15 +64,21 @@ public void 
remove(float requestsPerSec, ActionListener listener, Boole private void getEmptyStateIndices(ActionListener> listener) { IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest().indices(AnomalyDetectorsIndex.jobStateIndexPattern()); indicesStatsRequest.setParentTask(parentTaskId); - client.admin().indices().stats(indicesStatsRequest, ActionListener.wrap(indicesStatsResponse -> { - Set emptyStateIndices = indicesStatsResponse.getIndices() - .values() - .stream() - .filter(stats -> stats.getTotal().getDocs().getCount() == 0) - .map(IndexStats::getIndex) - .collect(toSet()); - listener.onResponse(emptyStateIndices); - }, listener::onFailure)); + client.admin() + .indices() + .stats( + indicesStatsRequest, + listener.delegateFailureAndWrap( + (l, indicesStatsResponse) -> l.onResponse( + indicesStatsResponse.getIndices() + .values() + .stream() + .filter(stats -> stats.getTotal().getDocs().getCount() == 0) + .map(IndexStats::getIndex) + .collect(toSet()) + ) + ) + ); } private void getCurrentStateIndices(ActionListener> listener) { @@ -82,7 +88,7 @@ private void getCurrentStateIndices(ActionListener> listener) { .indices() .getIndex( getIndexRequest, - ActionListener.wrap(getIndexResponse -> listener.onResponse(Set.of(getIndexResponse.getIndices())), listener::onFailure) + listener.delegateFailureAndWrap((l, getIndexResponse) -> l.onResponse(Set.of(getIndexResponse.getIndices()))) ); } @@ -93,7 +99,7 @@ private void executeDeleteEmptyStateIndices(Set emptyStateIndices, Actio .indices() .delete( deleteIndexRequest, - ActionListener.wrap(deleteResponse -> listener.onResponse(deleteResponse.isAcknowledged()), listener::onFailure) + listener.delegateFailureAndWrap((l, deleteResponse) -> l.onResponse(deleteResponse.isAcknowledged())) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java 
index 507e9dac6282d..27bd3c926d944 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java @@ -249,7 +249,7 @@ private void deleteModelSnapshots(List modelSnapshots, String job return; } JobDataDeleter deleter = new JobDataDeleter(client, jobId); - deleter.deleteModelSnapshots(modelSnapshots, ActionListener.wrap(bulkResponse -> { + deleter.deleteModelSnapshots(modelSnapshots, listener.delegateFailureAndWrap((l, bulkResponse) -> { auditor.info(jobId, Messages.getMessage(Messages.JOB_AUDIT_SNAPSHOTS_DELETED, modelSnapshots.size())); LOGGER.debug( () -> format( @@ -259,8 +259,8 @@ private void deleteModelSnapshots(List modelSnapshots, String job modelSnapshots.stream().map(ModelSnapshot::getDescription).collect(toList()) ) ); - listener.onResponse(true); - }, listener::onFailure)); + l.onResponse(true); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java index 654ce87fc5e30..35e16b9fa8b88 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java @@ -195,11 +195,11 @@ static void latestBucketTime(OriginSettingClient client, TaskId parentTaskId, St searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); searchRequest.setParentTask(parentTaskId); - client.search(searchRequest, ActionListener.wrap(response -> { + client.search(searchRequest, listener.delegateFailureAndWrap((delegate, response) -> { SearchHit[] hits = response.getHits().getHits(); if (hits.length == 0) { // no buckets found - listener.onResponse(null); + 
delegate.onResponse(null); } else { try ( @@ -210,12 +210,12 @@ static void latestBucketTime(OriginSettingClient client, TaskId parentTaskId, St ) ) { Bucket bucket = Bucket.LENIENT_PARSER.apply(parser, null); - listener.onResponse(bucket.getTimestamp().getTime()); + delegate.onResponse(bucket.getTimestamp().getTime()); } catch (IOException e) { - listener.onFailure(new ElasticsearchParseException("failed to parse bucket", e)); + delegate.onFailure(new ElasticsearchParseException("failed to parse bucket", e)); } } - }, listener::onFailure)); + })); } private void auditResultsWereDeleted(String jobId, long cutoffEpochMs) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index 09cd6225cf0ca..c50e744bde96b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -398,18 +398,18 @@ private void stopAssociatedDatafeedForFailedJob(String jobId) { } private void getRunningDatafeed(String jobId, ActionListener listener) { - ActionListener> datafeedListener = ActionListener.wrap(datafeeds -> { + ActionListener> datafeedListener = listener.delegateFailureAndWrap((delegate, datafeeds) -> { assert datafeeds.size() <= 1; if (datafeeds.isEmpty()) { - listener.onResponse(null); + delegate.onResponse(null); return; } String datafeedId = datafeeds.iterator().next(); PersistentTasksCustomMetadata tasks = clusterState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE); PersistentTasksCustomMetadata.PersistentTask datafeedTask = MlTasks.getDatafeedTask(datafeedId, tasks); - listener.onResponse(datafeedTask != null ? datafeedId : null); - }, listener::onFailure); + delegate.onResponse(datafeedTask != null ? 
datafeedId : null); + }); datafeedConfigProvider.findDatafeedIdsForJobIds(Collections.singleton(jobId), datafeedListener); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java index b560a758b8e83..a452c156e77f1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java @@ -27,6 +27,7 @@ import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.InOrder; +import org.mockito.Mockito; import org.mockito.stubbing.Answer; import java.util.Map; @@ -57,6 +58,7 @@ public void setUpTests() { client = mock(Client.class); OriginSettingClient originSettingClient = MockOriginSettingClient.mockOriginSettingClient(client, ClientHelper.ML_ORIGIN); listener = mock(ActionListener.class); + when(listener.delegateFailureAndWrap(any())).thenCallRealMethod(); deleteIndexRequestCaptor = ArgumentCaptor.forClass(DeleteIndexRequest.class); remover = new EmptyStateIndexRemover(originSettingClient, new TaskId("test", 0L)); @@ -66,6 +68,7 @@ public void setUpTests() { public void verifyNoOtherInteractionsWithMocks() { verify(client).settings(); verify(client, atLeastOnce()).threadPool(); + verify(listener, Mockito.atLeast(0)).delegateFailureAndWrap(any()); verifyNoMoreInteractions(client, listener); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java index ad0719011c92e..39f1ead7e24e0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java @@ -60,6 +60,7 @@ public void setUpTests() { client = mock(Client.class); originSettingClient = MockOriginSettingClient.mockOriginSettingClient(client, ClientHelper.ML_ORIGIN); listener = mock(ActionListener.class); + when(listener.delegateFailureAndWrap(any())).thenCallRealMethod(); } public void testRemove_GivenNoJobs() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java index 5aa5b847b26be..4dbb4eda07b0a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java @@ -60,6 +60,7 @@ public void setUpTests() { client = mock(Client.class); originSettingClient = MockOriginSettingClient.mockOriginSettingClient(client, ClientHelper.ML_ORIGIN); listener = mock(ActionListener.class); + when(listener.delegateFailureAndWrap(any())).thenCallRealMethod(); } public void testRemove_GivenNoJobs() {