diff --git a/.ci/scripts/packaging-test.sh b/.ci/scripts/packaging-test.sh index 1626255c30b4f..6b9938dabffa8 100755 --- a/.ci/scripts/packaging-test.sh +++ b/.ci/scripts/packaging-test.sh @@ -39,7 +39,7 @@ if [ -f "/etc/os-release" ] ; then # Work around incorrect lintian version # https://github.com/elastic/elasticsearch/issues/48573 if [ $VERSION_ID == 10 ] ; then - sudo apt-get update -y + sudo apt-get update -y || true sudo apt-get install -y --allow-downgrades lintian=2.15.0 fi fi diff --git a/docs/changelog/111684.yaml b/docs/changelog/111684.yaml new file mode 100644 index 0000000000000..32edb5723cb0a --- /dev/null +++ b/docs/changelog/111684.yaml @@ -0,0 +1,5 @@ +pr: 111684 +summary: Write downloaded model parts async +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/113051.yaml b/docs/changelog/113051.yaml new file mode 100644 index 0000000000000..9be68f9f2b03e --- /dev/null +++ b/docs/changelog/113051.yaml @@ -0,0 +1,5 @@ +pr: 113051 +summary: Add Search Inference ID To Semantic Text Mapping +area: Mapping +type: enhancement +issues: [] diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 79b5f2b69f24d..24dbee8c2983b 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -6,10 +6,10 @@ include::links.asciidoc[] include::landing-page.asciidoc[] -include::intro.asciidoc[] - include::release-notes/highlights.asciidoc[] +include::intro.asciidoc[] + include::quickstart/index.asciidoc[] include::setup.asciidoc[] diff --git a/docs/reference/indices/index-templates.asciidoc b/docs/reference/indices/index-templates.asciidoc index 66911716ffee2..5b152ecf177ec 100644 --- a/docs/reference/indices/index-templates.asciidoc +++ b/docs/reference/indices/index-templates.asciidoc @@ -44,6 +44,7 @@ following index patterns: - `metrics-*-*` - `synthetics-*-*` - `profiling-*` +- `security_solution-*-*` // end::built-in-index-template-patterns[] {fleet-guide}/fleet-overview.html[{agent}] uses these templates to create diff --git a/docs/reference/intro.asciidoc b/docs/reference/intro.asciidoc index f80856368af2b..831888103c5c1 100644 --- a/docs/reference/intro.asciidoc +++ b/docs/reference/intro.asciidoc @@ -1,68 +1,98 @@ [[elasticsearch-intro]] -== What is {es}? +== {es} basics + +This guide covers the core concepts you need to understand to get started with {es}. +If you'd prefer to start working with {es} right away, set up a <> and jump to <>. + +This guide covers the following topics: + +* <>: Learn about {es} and some of its main use cases. +* <>: Understand your options for deploying {es} in different environments, including a fast local development setup. +* <>: Understand {es}'s most important primitives and how it stores data. +* <>: Understand your options for ingesting data into {es}. +* <>: Understand your options for searching and analyzing data in {es}. +* <>: Understand the basic concepts required for moving your {es} deployment to production. + +[[elasticsearch-intro-what-is-es]] +=== What is {es}? {es-repo}[{es}] is a distributed search and analytics engine, scalable data store, and vector database built on Apache Lucene. It's optimized for speed and relevance on production-scale workloads. Use {es} to search, index, store, and analyze data of all shapes and sizes in near real time. +{es} is the heart of the {estc-welcome-current}/stack-components.html[Elastic Stack]. 
+Combined with https://www.elastic.co/kibana[{kib}], it powers the following Elastic solutions: + +* https://www.elastic.co/observability[Observability] +* https://www.elastic.co/enterprise-search[Search] +* https://www.elastic.co/security[Security] + [TIP] ==== {es} has a lot of features. Explore the full list on the https://www.elastic.co/elasticsearch/features[product webpage^]. ==== -{es} is the heart of the {estc-welcome-current}/stack-components.html[Elastic Stack] and powers the Elastic https://www.elastic.co/enterprise-search[Search], https://www.elastic.co/observability[Observability] and https://www.elastic.co/security[Security] solutions. - -{es} is used for a wide and growing range of use cases. Here are a few examples: - -* *Monitor log and event data*: Store logs, metrics, and event data for observability and security information and event management (SIEM). -* *Build search applications*: Add search capabilities to apps or websites, or build search engines over internal data. -* *Vector database*: Store and search vectorized data, and create vector embeddings with built-in and third-party natural language processing (NLP) models. -* *Retrieval augmented generation (RAG)*: Use {es} as a retrieval engine to augment generative AI models. -* *Application and security monitoring*: Monitor and analyze application performance and security data. -* *Machine learning*: Use {ml} to automatically model the behavior of your data in real-time. - -This is just a sample of search, observability, and security use cases enabled by {es}. -Refer to our https://www.elastic.co/customers/success-stories[customer success stories] for concrete examples across a range of industries. -// Link to demos, search labs chatbots - [discrete] [[elasticsearch-intro-elastic-stack]] .What is the Elastic Stack? ******************************* {es} is the core component of the Elastic Stack, a suite of products for collecting, storing, searching, and visualizing data. -https://www.elastic.co/guide/en/starting-with-the-elasticsearch-platform-and-its-solutions/current/stack-components.html[Learn more about the Elastic Stack]. +{estc-welcome-current}/stack-components.html[Learn more about the Elastic Stack]. ******************************* -// TODO: Remove once we've moved Stack Overview to a subpage? [discrete] +[[elasticsearch-intro-use-cases]] +==== Use cases + +{es} is used for a wide and growing range of use cases. Here are a few examples: + +**Observability** + +* *Logs, metrics, and traces*: Collect, store, and analyze logs, metrics, and traces from applications, systems, and services. +* *Application performance monitoring (APM)*: Monitor and analyze the performance of business-critical software applications. +* *Real user monitoring (RUM)*: Monitor, quantify, and analyze user interactions with web applications. +* *OpenTelemetry*: Reuse your existing instrumentation to send telemetry data to the Elastic Stack using the OpenTelemetry standard. + +**Search** + +* *Full-text search*: Build a fast, relevant full-text search solution using inverted indexes, tokenization, and text analysis. +* *Vector database*: Store and search vectorized data, and create vector embeddings with built-in and third-party natural language processing (NLP) models. +* *Semantic search*: Understand the intent and contextual meaning behind search queries using tools like synonyms, dense vector embeddings, and learned sparse query-document expansion. 
+* *Hybrid search*: Combine full-text search with vector search using state-of-the-art ranking algorithms. +* *Build search experiences*: Add hybrid search capabilities to apps or websites, or build enterprise search engines over your organization's internal data sources. +* *Retrieval augmented generation (RAG)*: Use {es} as a retrieval engine to supplement generative AI models with more relevant, up-to-date, or proprietary data for a range of use cases. +* *Geospatial search*: Search for locations and calculate spatial relationships using geospatial queries. + +**Security** + +* *Security information and event management (SIEM)*: Collect, store, and analyze security data from applications, systems, and services. +* *Endpoint security*: Monitor and analyze endpoint security data. +* *Threat hunting*: Search and analyze data to detect and respond to security threats. + +This is just a sample of search, observability, and security use cases enabled by {es}. +Refer to Elastic https://www.elastic.co/customers/success-stories[customer success stories] for concrete examples across a range of industries. + [[elasticsearch-intro-deploy]] -=== Deployment options +=== Run {es} To use {es}, you need a running instance of the {es} service. -You can deploy {es} in various ways: +You can deploy {es} in various ways. -* <>: Get started quickly with a minimal local Docker setup. -* {cloud}/ec-getting-started-trial.html[*Elastic Cloud*]: {es} is available as part of our hosted Elastic Stack offering, deployed in the cloud with your provider of choice. Sign up for a https://cloud.elastic.co/registration[14-day free trial]. +**Quick start option** + +* <>: Get started quickly with a minimal local Docker setup for development and testing. + +**Hosted options** + +* {cloud}/ec-getting-started-trial.html[*Elastic Cloud Hosted*]: {es} is available as part of the hosted Elastic Stack offering, deployed in the cloud with your provider of choice. Sign up for a https://cloud.elastic.co/registration[14-day free trial]. * {serverless-docs}/general/sign-up-trial[*Elastic Cloud Serverless* (technical preview)]: Create serverless projects for autoscaled and fully managed {es} deployments. Sign up for a https://cloud.elastic.co/serverless-registration[14-day free trial]. -**Advanced deployment options** +**Advanced options** * <>: Install, configure, and run {es} on your own premises. * {ece-ref}/Elastic-Cloud-Enterprise-overview.html[*Elastic Cloud Enterprise*]: Deploy Elastic Cloud on public or private clouds, virtual machines, or your own premises. * {eck-ref}/k8s-overview.html[*Elastic Cloud on Kubernetes*]: Deploy Elastic Cloud on Kubernetes. -[discrete] -[[elasticsearch-next-steps]] -=== Learn more - -Here are some resources to help you get started: - -* <>: A beginner's guide to deploying your first {es} instance, indexing data, and running queries. -* https://elastic.co/webinars/getting-started-elasticsearch[Webinar: Introduction to {es}]: Register for our live webinars to learn directly from {es} experts. -* https://www.elastic.co/search-labs[Elastic Search Labs]: Tutorials and blogs that explore AI-powered search using the latest {es} features. -** Follow our tutorial https://www.elastic.co/search-labs/tutorials/search-tutorial/welcome[to build a hybrid search solution in Python]. -** Check out the https://github.com/elastic/elasticsearch-labs?tab=readme-ov-file#elasticsearch-examples--apps[`elasticsearch-labs` repository] for a range of Python notebooks and apps for various use cases. 
- // new html page [[documents-indices]] === Indices, documents, and fields @@ -73,20 +103,16 @@ Here are some resources to help you get started: The index is the fundamental unit of storage in {es}, a logical namespace for storing data that share similar characteristics. After you have {es} <>, you'll get started by creating an index to store your data. +An index is a collection of documents uniquely identified by a name or an <>. +This unique name is important because it's used to target the index in search queries and other operations. + [TIP] ==== A closely related concept is a <>. -This index abstraction is optimized for append-only time-series data, and is made up of hidden, auto-generated backing indices. -If you're working with time-series data, we recommend the {observability-guide}[Elastic Observability] solution. +This index abstraction is optimized for append-only timestamped data, and is made up of hidden, auto-generated backing indices. +If you're working with timestamped data, we recommend the {observability-guide}[Elastic Observability] solution for additional tools and optimized content. ==== -Some key facts about indices: - -* An index is a collection of documents -* An index has a unique name -* An index can also be referred to by an alias -* An index has a mapping that defines the schema of its documents - [discrete] [[elasticsearch-intro-documents-fields]] ==== Documents and fields @@ -126,14 +152,12 @@ A simple {es} document might look like this: [discrete] [[elasticsearch-intro-documents-fields-data-metadata]] -==== Data and metadata +==== Metadata fields -An indexed document contains data and metadata. +An indexed document contains data and metadata. <> are system fields that store information about the documents. In {es}, metadata fields are prefixed with an underscore. +For example, the following fields are metadata fields: -The most important metadata fields are: - -* `_source`: Contains the original JSON document. * `_index`: The name of the index where the document is stored. * `_id`: The document's ID. IDs must be unique per index. @@ -146,8 +170,8 @@ A mapping defines the <> for each field, how the field and how it should be stored. When adding documents to {es}, you have two options for mappings: -* <>: Let {es} automatically detect the data types and create the mappings for you. This is great for getting started quickly, but can lead to unexpected results for complex data. -* <>: Define the mappings up front by specifying data types for each field. Recommended for production use cases, because you have much more control over how your data is indexed. +* <>: Let {es} automatically detect the data types and create the mappings for you. Dynamic mapping helps you get started quickly, but might yield suboptimal results for your specific use case due to automatic field type inference. +* <>: Define the mappings up front by specifying data types for each field. Recommended for production use cases, because you have full control over how your data is indexed to suit your specific use case. [TIP] ==== @@ -155,81 +179,207 @@ You can use a combination of dynamic and explicit mapping on the same index. This is useful when you have a mix of known and unknown fields in your data. ==== +// New html page +[[es-ingestion-overview]] +=== Add data to {es} + +There are multiple ways to ingest data into {es}. +The option that you choose depends on whether you're working with timestamped data or non-timestamped data, where the data is coming from, its complexity, and more. 
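+For a quick first taste of the API-based options described below, here is a minimal request that indexes a single JSON document and creates the index on the fly. The index name and fields are illustrative placeholders, not a built-in dataset: + +[source,console] +---- +POST /my-first-index/_doc +{ "title": "Hello Elasticsearch", "created_at": "2024-01-01T00:00:00Z" } +---- + +If the index doesn't exist yet, {es} creates it and infers field types using dynamic mapping.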
+ +[TIP] +==== +You can load {kibana-ref}/connect-to-elasticsearch.html#_add_sample_data[sample data] into your {es} cluster using {kib}, to get started quickly. +==== + +[discrete] +[[es-ingestion-overview-general-content]] +==== General content + +General content is data that does not have a timestamp. +This could be data like vector embeddings, website content, product catalogs, and more. +For general content, you have the following options for adding data to {es} indices: + +* <>: Use the {es} <> to index documents directly, using the Dev Tools {kibana-ref}/console-kibana.html[Console], or cURL. ++ +If you're building a website or app, then you can call Elasticsearch APIs using an https://www.elastic.co/guide/en/elasticsearch/client/index.html[{es} client] in the programming language of your choice. If you use the Python client, then check out the `elasticsearch-labs` repo for various https://github.com/elastic/elasticsearch-labs/tree/main/notebooks/search/python-examples[example notebooks]. +* {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[File upload]: Use the {kib} file uploader to index single files for one-off testing and exploration. The GUI guides you through setting up your index and field mappings. +* https://github.com/elastic/crawler[Web crawler]: Extract and index web page content into {es} documents. +* {enterprise-search-ref}/connectors.html[Connectors]: Sync data from various third-party data sources to create searchable, read-only replicas in {es}. + +[discrete] +[[es-ingestion-overview-timestamped]] +==== Timestamped data + +Timestamped data in {es} refers to datasets that include a timestamp field. If you use the {ecs-ref}/ecs-reference.html[Elastic Common Schema (ECS)], this field is named `@timestamp`. +This could be data like logs, metrics, and traces. + +For timestamped data, you have the following options for adding data to {es} data streams: + +* {fleet-guide}/fleet-overview.html[Elastic Agent and Fleet]: The preferred way to index timestamped data. Each Elastic Agent based integration includes default ingestion rules, dashboards, and visualizations to start analyzing your data right away. +You can use the Fleet UI in {kib} to centrally manage Elastic Agents and their policies. +* {beats-ref}/beats-reference.html[Beats]: If your data source isn't supported by Elastic Agent, use Beats to collect and ship data to Elasticsearch. You install a separate Beat for each type of data to collect. +* {logstash-ref}/introduction.html[Logstash]: Logstash is an open source data collection engine with real-time pipelining capabilities that supports a wide variety of data sources. You might use this option because neither Elastic Agent nor Beats supports your data source. You can also use Logstash to persist incoming data, or if you need to send the data to multiple destinations. +* {cloud}/ec-ingest-guides.html[Language clients]: The linked tutorials demonstrate how to use {es} programming language clients to ingest data from an application. In these examples, {es} is running on Elastic Cloud, but the same principles apply to any {es} deployment. + +[TIP] +==== +If you're interested in data ingestion pipelines for timestamped data, use the decision tree in the {cloud}/ec-cloud-ingest-data.html#ec-data-ingest-pipeline[Elastic Cloud docs] to understand your options. 
+==== + // New html page [[search-analyze]] -=== Search and analyze +=== Search and analyze data -While you can use {es} as a document store and retrieve documents and their -metadata, the real power comes from being able to easily access the full suite -of search capabilities built on the Apache Lucene search engine library. +You can use {es} as a basic document store to retrieve documents and their +metadata. +However, the real power of {es} comes from its advanced search and analytics capabilities. -{es} provides a simple, coherent REST API for managing your cluster and indexing -and searching your data. For testing purposes, you can easily submit requests -directly from the command line or through the Developer Console in {kib}. From -your applications, you can use the -https://www.elastic.co/guide/en/elasticsearch/client/index.html[{es} client] -for your language of choice: Java, JavaScript, Go, .NET, PHP, Perl, Python -or Ruby. +You'll use a combination of an API endpoint and a query language to interact with your data. [discrete] -[[search-data]] -==== Searching your data - -The {es} REST APIs support structured queries, full text queries, and complex -queries that combine the two. Structured queries are -similar to the types of queries you can construct in SQL. For example, you -could search the `gender` and `age` fields in your `employee` index and sort the -matches by the `hire_date` field. Full-text queries find all documents that -match the query string and return them sorted by _relevance_—how good a -match they are for your search terms. - -In addition to searching for individual terms, you can perform phrase searches, -similarity searches, and prefix searches, and get autocomplete suggestions. - -Have geospatial or other numerical data that you want to search? {es} indexes -non-textual data in optimized data structures that support -high-performance geo and numerical queries. - -You can access all of these search capabilities using {es}'s -comprehensive JSON-style query language (<>). You can also -construct <> to search and aggregate data -natively inside {es}, and JDBC and ODBC drivers enable a broad range of -third-party applications to interact with {es} via SQL. +[[search-analyze-rest-api]] +==== REST API + +Use REST APIs to manage your {es} cluster, and to index +and search your data. +For testing purposes, you can submit requests +directly from the command line or through the Dev Tools {kibana-ref}/console-kibana.html[Console] in {kib}. +From your applications, you can use a +https://www.elastic.co/guide/en/elasticsearch/client/index.html[client] +in your programming language of choice. + +Refer to <> for a hands-on example of using the `_search` endpoint, adding data to {es}, and running basic searches in Query DSL syntax. [discrete] -[[analyze-data]] -==== Analyzing your data +[[search-analyze-query-languages]] +==== Query languages + +{es} provides a number of query languages for interacting with your data. + +*Query DSL* is the primary query language for {es} today. + +*{esql}* is a new piped query language and compute engine which was first added in version *8.11*. + +{esql} does not yet support all the features of Query DSL, like full-text search and semantic search. +Look forward to new {esql} features and functionalities in each release. + +Refer to <> for a full overview of the query languages available in {es}. 
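+To give you a feel for the two syntaxes before diving into the details, here is roughly the same keyword filter expressed first in Query DSL and then in {esql}. The index and field names are illustrative placeholders: + +[source,console] +---- +GET /my-index/_search +{ "query": { "term": { "status": "published" } } } + +POST /_query +{ "query": "FROM my-index | WHERE status == \"published\" | LIMIT 10" } +---- + +Query DSL requests use the `_search` endpoint, while {esql} queries use the `_query` endpoint.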
+ +[discrete] +[[search-analyze-query-dsl]] +===== Query DSL + +<> is a full-featured JSON-style query language that enables complex searching, filtering, and aggregations. +It is the original and most powerful query language for {es} today. + +The <> accepts queries written in Query DSL syntax. + +[discrete] +[[search-analyze-query-dsl-search-filter]] +====== Search and filter with Query DSL + +Query DSL supports a wide range of search techniques, including the following: + +* <>: Search text that has been analyzed and indexed to support phrase or proximity queries, fuzzy matches, and more. +* <>: Search for exact matches using `keyword` fields. +* <>: Search `semantic_text` fields using dense or sparse vector search on embeddings generated in your {es} cluster. +* <>: Search for similar dense vectors using the kNN algorithm for embeddings generated outside of {es}. +* <>: Search for locations and calculate spatial relationships using geospatial queries. -{es} aggregations enable you to build complex summaries of your data and gain -insight into key metrics, patterns, and trends. Instead of just finding the -proverbial “needle in a haystack”, aggregations enable you to answer questions -like: +Learn about the full range of queries supported by <>. -* How many needles are in the haystack? -* What is the average length of the needles? -* What is the median length of the needles, broken down by manufacturer? -* How many needles were added to the haystack in each of the last six months? +You can also filter data using Query DSL. +Filters enable you to include or exclude documents by retrieving documents that match specific field-level criteria. +A query that uses the `filter` parameter indicates <>. -You can also use aggregations to answer more subtle questions, such as: +[discrete] +[[search-analyze-data-query-dsl]] +====== Analyze with Query DSL -* What are your most popular needle manufacturers? -* Are there any unusual or anomalous clumps of needles? +<> are the primary tool for analyzing {es} data using Query DSL. +Aggregations enable you to build complex summaries of your data and gain +insight into key metrics, patterns, and trends. -Because aggregations leverage the same data-structures used for search, they are +Because aggregations leverage the same data structures used for search, they are also very fast. This enables you to analyze and visualize your data in real time. -Your reports and dashboards update as your data changes so you can take action -based on the latest information. +You can search documents, filter results, and perform analytics at the same time, on the same +data, in a single request. +That means aggregations are calculated in the context of the search query. + +The following aggregation types are available: + +* <>: Calculate metrics, +such as a sum or average, from field values. +* <>: Group documents into buckets based on field values, ranges, +or other criteria. +* <>: Run aggregations on the results of other aggregations. + +Run aggregations by specifying the <>'s `aggs` parameter. +Learn more in <>. + +[discrete] +[[search-analyze-data-esql]] +===== {esql} -What’s more, aggregations operate alongside search requests. You can search -documents, filter results, and perform analytics at the same time, on the same -data, in a single request.
And because aggregations are calculated in the -context of a particular search, you’re not just displaying a count of all -size 70 needles, you’re displaying a count of the size 70 needles -that match your users' search criteria--for example, all size 70 _non-stick -embroidery_ needles. +<> is a piped query language for filtering, transforming, and analyzing data. +{esql} is built on top of a new compute engine, where search, aggregation, and transformation functions are +directly executed within {es} itself. +{esql} syntax can also be used within various {kib} tools. + +The <> accepts queries written in {esql} syntax. + +Today, it supports a subset of the features available in Query DSL, like aggregations, filters, and transformations. +It does not yet support full-text search or semantic search. + +It comes with a comprehensive set of <> for working with data and has robust integration with {kib}'s Discover, dashboards, and visualizations. + +Learn more in <>, or try https://www.elastic.co/training/introduction-to-esql[our training course]. + +[discrete] +[[search-analyze-data-query-languages-table]] +==== List of available query languages +The following table summarizes all available {es} query languages, to help you choose the right one for your use case. + +[cols="1,2,2,1", options="header"] +|=== +| Name | Description | Use cases | API endpoint + +| <> +| The primary query language for {es}. A powerful and flexible JSON-style language that enables complex queries. +| Full-text search, semantic search, keyword search, filtering, aggregations, and more. +| <> + + +| <> +| Introduced in *8.11*, the Elasticsearch Query Language ({esql}) is a piped query language for filtering, transforming, and analyzing data. +| Initially tailored towards working with time series data like logs and metrics. +Robust integration with {kib} for querying, visualizing, and analyzing data. +Does not yet support full-text search. +| <> + + +| <> +| Event Query Language (EQL) is a query language for event-based time series data. Data must contain the `@timestamp` field to use EQL. +| Designed for the threat hunting security use case. +| <> + +| <> +| Allows native, real-time SQL-like querying against {es} data. JDBC and ODBC drivers are available for integration with business intelligence (BI) tools. +| Enables users familiar with SQL to query {es} data using familiar syntax for BI and reporting. +| <> + +| {kibana-ref}/kuery-query.html[Kibana Query Language (KQL)] +| Kibana Query Language (KQL) is a text-based query language for filtering data when you access it through the {kib} UI. +| Use KQL to filter documents where a value for a field exists, matches a given value, or is within a given range. +| N/A + +|=== + +// New html page +// TODO: this page won't live here long term [[scalability]] -=== Scalability and resilience +=== Plan for production {es} is built to be always available and to scale with your needs. It does this by being distributed by nature. You can add servers (nodes) to a cluster to diff --git a/docs/reference/landing-page.asciidoc b/docs/reference/landing-page.asciidoc index e781dc0aff4e3..f1b5ce8210996 100644 --- a/docs/reference/landing-page.asciidoc +++ b/docs/reference/landing-page.asciidoc @@ -62,7 +62,7 @@ Elasticsearch is the search and analytics engine that powers the Elastic Stack.
diff --git a/docs/reference/quickstart/getting-started.asciidoc b/docs/reference/quickstart/getting-started.asciidoc index 6b3095e07f9d4..e674dda147bcc 100644 --- a/docs/reference/quickstart/getting-started.asciidoc +++ b/docs/reference/quickstart/getting-started.asciidoc @@ -1,47 +1,20 @@ [[getting-started]] -== Quick start guide +== Quick start: Add data using Elasticsearch APIs +++++ +Basics: Add data using APIs +++++ -This guide helps you learn how to: +In this quick start guide, you'll learn how to do the following tasks: -* Run {es} and {kib} (using {ecloud} or in a local Docker dev environment), -* add simple (non-timestamped) dataset to {es}, -* run basic searches. - -[TIP] -==== -If you're interested in using {es} with Python, check out Elastic Search Labs. This is the best place to explore AI-powered search use cases, such as working with embeddings, vector search, and retrieval augmented generation (RAG). - -* https://www.elastic.co/search-labs/tutorials/search-tutorial/welcome[Tutorial]: this walks you through building a complete search solution with {es}, from the ground up. -* https://github.com/elastic/elasticsearch-labs[`elasticsearch-labs` repository]: it contains a range of Python https://github.com/elastic/elasticsearch-labs/tree/main/notebooks[notebooks] and https://github.com/elastic/elasticsearch-labs/tree/main/example-apps[example apps]. -==== - -[discrete] -[[run-elasticsearch]] -=== Run {es} - -The simplest way to set up {es} is to create a managed deployment with {ess} on -{ecloud}. If you prefer to manage your own test environment, install and -run {es} using Docker. - -include::{es-ref-dir}/tab-widgets/code.asciidoc[] -include::{es-ref-dir}/tab-widgets/quick-start-install-widget.asciidoc[] - -[discrete] -[[send-requests-to-elasticsearch]] -=== Send requests to {es} - -You send data and other requests to {es} using REST APIs. This lets you interact -with {es} using any client that sends HTTP requests, such as -https://curl.se[curl]. You can also use {kib}'s Console to send requests to -{es}. - -include::{es-ref-dir}/tab-widgets/api-call-widget.asciidoc[] +* Add a small, non-timestamped dataset to {es} using Elasticsearch REST APIs. +* Run basic searches. [discrete] [[add-data]] === Add data -You add data to {es} as JSON objects called documents. {es} stores these +You add data to {es} as JSON objects called documents. +{es} stores these documents in searchable indices. [discrete] @@ -58,6 +31,13 @@ The request automatically creates the index. PUT books ---- // TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE books +-------------------------------------------------- +// TEARDOWN + //// [source,console] @@ -236,10 +216,11 @@ JSON object submitted during indexing. [[qs-match-query]] ==== `match` query -You can use the `match` query to search for documents that contain a specific value in a specific field. +You can use the <> to search for documents that contain a specific value in a specific field. This is the standard query for performing full-text search, including fuzzy matching and phrase searches. Run the following command to search the `books` index for documents containing `brave` in the `name` field: + [source,console] ---- GET books/_search @@ -251,34 +232,4 @@ GET books/_search } } ---- -// TEST[continued] - -[discrete] -[[whats-next]] -=== Next steps - -Now that {es} is up and running and you've learned the basics, you'll probably want to test out larger datasets, or index your own data. 
- -[discrete] -[[whats-next-search-learn-more]] -==== Learn more about search queries - -* <>. Jump here to learn about exact value search, full-text search, vector search, and more, using the <>. - -[discrete] -[[whats-next-more-data]] -==== Add more data - -* Learn how to {kibana-ref}/sample-data.html[install sample data] using {kib}. This is a quick way to test out {es} on larger workloads. -* Learn how to use the {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[upload data UI] in {kib} to add your own CSV, TSV, or JSON files. -* Use the https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html[bulk API] to ingest your own datasets to {es}. - -[discrete] -[[whats-next-client-libraries]] -==== {es} programming language clients - -* Check out our https://www.elastic.co/guide/en/elasticsearch/client/index.html[client library] to work with your {es} instance in your preferred programming language. -* If you're using Python, check out https://www.elastic.co/search-labs[Elastic Search Labs] for a range of examples that use the {es} Python client. This is the best place to explore AI-powered search use cases, such as working with embeddings, vector search, and retrieval augmented generation (RAG). -** This extensive, hands-on https://www.elastic.co/search-labs/tutorials/search-tutorial/welcome[tutorial] -walks you through building a complete search solution with {es}, from the ground up. -** https://github.com/elastic/elasticsearch-labs[`elasticsearch-labs`] contains a range of executable Python https://github.com/elastic/elasticsearch-labs/tree/main/notebooks[notebooks] and https://github.com/elastic/elasticsearch-labs/tree/main/example-apps[example apps]. \ No newline at end of file +// TEST[continued] \ No newline at end of file diff --git a/docs/reference/quickstart/index.asciidoc b/docs/reference/quickstart/index.asciidoc index e517d039e620b..6bfed4c198c75 100644 --- a/docs/reference/quickstart/index.asciidoc +++ b/docs/reference/quickstart/index.asciidoc @@ -1,10 +1,29 @@ [[quickstart]] -= Quickstart += Quick starts -Get started quickly with {es}. +Use these quick starts to get hands-on experience with the {es} APIs. +Unless otherwise noted, these examples will use queries written in <> syntax. -* Learn how to run {es} (and {kib}) for <>. -* Follow our <> to add data to {es} and query it. +[discrete] +[[quickstart-requirements]] +== Requirements -include::run-elasticsearch-locally.asciidoc[] -include::getting-started.asciidoc[] +You'll need a running {es} cluster, together with {kib} to use the Dev Tools API Console. +Get started <>, or see our <>. + +[discrete] +[[quickstart-list]] +== Hands-on quick starts + +* <>. Learn how to add data to {es} and perform basic searches. + +[discrete] +[[quickstart-python-links]] +== Working in Python + +If you're interested in using {es} with Python, check out Elastic Search Labs: + +* https://github.com/elastic/elasticsearch-labs[`elasticsearch-labs` repository]: Contains a range of Python https://github.com/elastic/elasticsearch-labs/tree/main/notebooks[notebooks] and https://github.com/elastic/elasticsearch-labs/tree/main/example-apps[example apps]. +* https://www.elastic.co/search-labs/tutorials/search-tutorial/welcome[Tutorial]: This walks you through building a complete search solution with {es} from the ground up using Flask.
+ +include::getting-started.asciidoc[] \ No newline at end of file diff --git a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc b/docs/reference/run-elasticsearch-locally.asciidoc similarity index 68% rename from docs/reference/quickstart/run-elasticsearch-locally.asciidoc rename to docs/reference/run-elasticsearch-locally.asciidoc index 24e0f3f22350e..64bcd3d066529 100644 --- a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc +++ b/docs/reference/run-elasticsearch-locally.asciidoc @@ -1,7 +1,7 @@ [[run-elasticsearch-locally]] -== Run {es} locally in Docker (without security) +== Run {es} locally in Docker ++++ -Local dev setup (Docker) +Run {es} locally ++++ [WARNING] @@ -9,24 +9,13 @@ *DO NOT USE THESE INSTRUCTIONS FOR PRODUCTION DEPLOYMENTS* The instructions on this page are for *local development only*. Do not use these instructions for production deployments, because they are not secure. -While this approach is convenient for experimenting and learning, you should never run the service in this way in a production environment. +While this approach is convenient for experimenting and learning, you should never run Elasticsearch in this way in a production environment. ==== -The following commands help you very quickly spin up a single-node {es} cluster, together with {kib} in Docker. -Note that if you don't need the {kib} UI, you can skip those instructions. +Follow this tutorial if you want to quickly set up {es} in Docker for local development or testing. -[discrete] -[[local-dev-why]] -=== When would I use this setup? - -Use this setup if you want to quickly spin up {es} (and {kib}) for local development or testing. - -For example you might: - -* Want to run a quick test to see how a feature works. -* Follow a tutorial or guide that requires an {es} cluster, like our <>. -* Experiment with the {es} APIs using different tools, like the Dev Tools Console, cURL, or an Elastic programming language client. -* Quickly spin up an {es} cluster to test an executable https://github.com/elastic/elasticsearch-labs/tree/main/notebooks#readme[Python notebook] locally. +This tutorial also includes instructions for installing {kib}. + If you don't need access to the {kib} UI, then you can skip those instructions. [discrete] [[local-dev-prerequisites]] @@ -118,12 +107,12 @@ When you access {kib}, use `elastic` as the username and the password you set ea [NOTE] ==== -The service is started with a trial license. The trial license enables all features of Elasticsearch for a trial period of 30 days. After the trial period expires, the license is downgraded to a basic license, which is free forever. If you prefer to skip the trial and use the basic license, set the value of the `xpack.license.self_generated.type` variable to basic instead. For a detailed feature comparison between the different licenses, refer to our https://www.elastic.co/subscriptions[subscriptions page]. +The service is started with a trial license. The trial license enables all features of Elasticsearch for a trial period of 30 days. After the trial period expires, the license is downgraded to a basic license, which is free forever. ==== [discrete] [[local-dev-connecting-clients]] -== Connecting to {es} with language clients +=== Connect to {es} with language clients To connect to the {es} cluster from a language client, you can use basic authentication with the `elastic` username and the password you set in the environment variable. 
@@ -172,12 +161,11 @@ curl -u elastic:$ELASTIC_PASSWORD \ [[local-dev-next-steps]] === Next steps -Use our <> to learn the basics of {es}: how to add data and query it. +Use our <> to learn the basics of {es}. [discrete] [[local-dev-production]] === Moving to production -This setup is not suitable for production use. For production deployments, we recommend using our managed service on Elastic Cloud. https://cloud.elastic.co/registration[Sign up for a free trial] (no credit card required). - -Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. +This setup is not suitable for production use. +Refer to <> for more information. \ No newline at end of file diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index b346fddc5e5a1..a284e563917c3 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -27,6 +27,8 @@ the only resource-intensive application on the host or container. For example, you might run {metricbeat} alongside {es} for cluster statistics, but a resource-heavy {ls} deployment should be on its own host. +include::run-elasticsearch-locally.asciidoc[] + include::setup/install.asciidoc[] include::setup/configuration.asciidoc[] diff --git a/docs/reference/tab-widgets/api-call.asciidoc b/docs/reference/tab-widgets/api-call.asciidoc index bb6b89374075d..5e70d73684436 100644 --- a/docs/reference/tab-widgets/api-call.asciidoc +++ b/docs/reference/tab-widgets/api-call.asciidoc @@ -1,5 +1,5 @@ // tag::cloud[] -**Use {kib}** +**Option 1: Use {kib}** //tag::kibana-api-ex[] . Open {kib}'s main menu ("*☰*" near Elastic logo) and go to **Dev Tools > Console**. @@ -16,9 +16,9 @@ GET / //end::kibana-api-ex[] -**Use curl** +**Option 2: Use `curl`** -To communicate with {es} using curl or another client, you need your cluster's +To communicate with {es} using `curl` or another client, you need your cluster's endpoint. . Open {kib}'s main menu and click **Manage this deployment**. . From your deployment menu, go to the **Elasticsearch** page. Click **Copy endpoint**. -. To submit an example API request, run the following curl command in a new +. To submit an example API request, run the following `curl` command in a new terminal session. Replace `<password>` with the password for the `elastic` user. Replace `<elasticsearch_endpoint>` with your endpoint.
+ diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java index dd6ed04f20378..ac73385a97d70 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java @@ -502,7 +502,7 @@ static void putComposableIndexTemplate(String id, List patterns, @Nullab request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, null, null, null)) + .template(Template.builder().settings(settings)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() ); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index e99f5be0a1e6b..8e7ecfa49f144 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -2441,7 +2441,13 @@ static void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), aliases, lifecycle)) + .template( + Template.builder() + .settings(settings) + .mappings(mappings == null ? null : CompressedXContent.fromJSON(mappings)) + .aliases(aliases) + .lifecycle(lifecycle) + ) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, withFailureStore)) .build() diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java index 59a8991e28195..4c85958498da0 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/ResolveClusterDataStreamIT.java @@ -453,7 +453,7 @@ void putComposableIndexTemplate(Client client, String id, List patterns, request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(null, null, aliases, null)) + .template(Template.builder().aliases(aliases)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() ); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java index 3eb7ab7a55494..dd3f1e74d4f4e 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudSystemDataStreamLifecycleIT.java @@ -201,12 +201,10 @@ public Collection getSystemDataStreamDescriptors() { ComposableIndexTemplate.builder() .indexPatterns(List.of(".test-data-stream")) .template( - new Template( - 
Settings.EMPTY, - mappings, - null, - DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() - ) + Template.builder() + .settings(Settings.EMPTY) + .mappings(mappings) + .lifecycle(DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build()) ) .dataStreamTemplate(new DataStreamTemplate()) .build(), diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 1168bbc904c40..89c440f5edf8b 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -345,7 +345,7 @@ public void testOriginationDate() throws Exception { request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(List.of("index_*")) - .template(new Template(null, CompressedXContent.fromJSON(mapping), null, null)) + .template(Template.builder().mappings(CompressedXContent.fromJSON(mapping))) .build() ); client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); @@ -1221,7 +1221,12 @@ static void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .template( + Template.builder() + .settings(settings) + .mappings(mappings == null ? null : CompressedXContent.fromJSON(mappings)) + .lifecycle(lifecycle) + ) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, withFailureStore)) .build() @@ -1268,14 +1273,12 @@ public Collection getSystemDataStreamDescriptors() { .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .indexPatterns(List.of(DataStream.BACKING_INDEX_PREFIX + SYSTEM_DATA_STREAM_NAME + "*")) .template( - new Template( - Settings.EMPTY, - null, - null, - DataStreamLifecycle.newBuilder() - .dataRetention(TimeValue.timeValueDays(SYSTEM_DATA_STREAM_RETENTION_DAYS)) - .build() - ) + Template.builder() + .settings(Settings.EMPTY) + .lifecycle( + DataStreamLifecycle.newBuilder() + .dataRetention(TimeValue.timeValueDays(SYSTEM_DATA_STREAM_RETENTION_DAYS)) + ) ) .build(), Map.of(), diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java index 48cb0321675a6..3c100d9cfe615 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java @@ -445,7 +445,12 @@ static void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .template( + Template.builder() + .settings(settings) + .mappings(mappings == null ? 
null : CompressedXContent.fromJSON(mappings)) + .lifecycle(lifecycle) + ) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java index e94385b2b6409..0ab105a467ab3 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleFixtures.java @@ -114,7 +114,12 @@ static void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .template( + Template.builder() + .settings(settings) + .mappings(mappings == null ? null : CompressedXContent.fromJSON(mappings)) + .lifecycle(lifecycle) + ) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() diff --git a/modules/dot-prefix-validation/build.gradle b/modules/dot-prefix-validation/build.gradle index b3ad3dd4aa01f..6e232570b4a22 100644 --- a/modules/dot-prefix-validation/build.gradle +++ b/modules/dot-prefix-validation/build.gradle @@ -23,3 +23,7 @@ restResources { tasks.named('yamlRestTest') { usesDefaultDistribution() } + +tasks.named('yamlRestCompatTest') { + usesDefaultDistribution() +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java index 6a5cf8a95bf97..fe591387e9b35 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.action.bulk.BulkRequest; @@ -26,6 +25,7 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -64,7 +64,7 @@ import static java.lang.Math.min; import static java.util.Collections.emptyList; import static java.util.Collections.unmodifiableList; -import static org.elasticsearch.action.bulk.BackoffPolicy.exponentialBackoff; +import static org.elasticsearch.common.BackoffPolicy.exponentialBackoff; import static org.elasticsearch.core.TimeValue.timeValueNanos; import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.MAX_DOCS_ALL_MATCHES; import static org.elasticsearch.rest.RestStatus.CONFLICT; diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java 
b/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java index 371d3488c3099..91ce987ff78c5 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java @@ -21,7 +21,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -33,6 +32,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSource.java index 29204b5bb0163..5c3db5aaa6cda 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSource.java @@ -18,12 +18,12 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.ResponseListener; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java index e047b2cd0675b..83e4695829373 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteResponse.Result; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.action.bulk.BulkRequest; @@ -47,6 +46,7 @@ import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -105,7 +105,7 @@ import static java.util.Collections.singletonList; import static java.util.Collections.synchronizedSet; import static 
org.apache.lucene.tests.util.TestUtil.randomSimpleString; -import static org.elasticsearch.action.bulk.BackoffPolicy.constantBackoff; +import static org.elasticsearch.common.BackoffPolicy.constantBackoff; import static org.elasticsearch.core.TimeValue.timeValueMillis; import static org.elasticsearch.core.TimeValue.timeValueSeconds; import static org.hamcrest.Matchers.contains; diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java index ee65b980c5fc8..1c104cbd08197 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; @@ -22,6 +21,7 @@ import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.support.AbstractClient; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java index 9c30ba07069f9..fa0e1d22f4556 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java @@ -12,11 +12,11 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.Retry; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java index 3224a8429792d..c91b2e448bf7d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java @@ -29,10 +29,10 @@ import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.HeapBufferedAsyncResponseConsumer; import org.elasticsearch.client.RestClient; +import 
org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.FileSystemUtils; diff --git a/muted-tests.yml b/muted-tests.yml index 295f9ee7acd78..381011301f533 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -212,8 +212,6 @@ tests: - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testResponse issue: https://github.com/elastic/elasticsearch/issues/113148 -- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/113152 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test30StartStop issue: https://github.com/elastic/elasticsearch/issues/113160 @@ -223,9 +221,6 @@ tests: - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests method: testErrorMidStream issue: https://github.com/elastic/elasticsearch/issues/113179 -- class: org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests - method: testHasPrivilegesOtherThanIndex - issue: https://github.com/elastic/elasticsearch/issues/113202 - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113054 @@ -241,15 +236,6 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests method: "testFold {TestCase= #2}" issue: https://github.com/elastic/elasticsearch/issues/113225 -- class: org.elasticsearch.integration.KibanaUserRoleIntegTests - method: testGetMappings - issue: https://github.com/elastic/elasticsearch/issues/113260 -- class: org.elasticsearch.xpack.security.authz.SecurityScrollTests - method: testSearchAndClearScroll - issue: https://github.com/elastic/elasticsearch/issues/113285 -- class: org.elasticsearch.integration.KibanaUserRoleIntegTests - method: testGetIndex - issue: https://github.com/elastic/elasticsearch/issues/113311 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test81JavaOptsInJvmOptions issue: https://github.com/elastic/elasticsearch/issues/113313 @@ -265,27 +251,15 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_force_delete/Test force deleting a running transform} issue: https://github.com/elastic/elasticsearch/issues/113327 -- class: org.elasticsearch.integration.KibanaUserRoleIntegTests - method: testValidateQuery - issue: https://github.com/elastic/elasticsearch/issues/113328 - class: org.elasticsearch.xpack.security.support.SecurityIndexManagerIntegTests method: testOnIndexAvailableForSearchIndexAlreadyAvailable issue: https://github.com/elastic/elasticsearch/issues/113336 -- class: org.elasticsearch.xpack.security.authz.SecurityScrollTests - method: testScrollIsPerUser - issue: https://github.com/elastic/elasticsearch/issues/113338 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=analytics/top_metrics/sort by scaled float field} issue: https://github.com/elastic/elasticsearch/issues/113340 -- class: org.elasticsearch.integration.KibanaUserRoleIntegTests - method: testFieldMappings - issue: https://github.com/elastic/elasticsearch/issues/113341 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/ccr/apis/follow/post-resume-follow/line_84} issue: https://github.com/elastic/elasticsearch/issues/113343 -- class: org.elasticsearch.integration.KibanaUserRoleIntegTests 
- method: testSearchAndMSearch - issue: https://github.com/elastic/elasticsearch/issues/113345 - class: org.elasticsearch.action.bulk.IncrementalBulkIT method: testBulkLevelBulkFailureAfterFirstIncrementalRequest issue: https://github.com/elastic/elasticsearch/issues/113365 @@ -307,6 +281,23 @@ tests: - class: org.elasticsearch.xpack.ml.integration.MlJobIT method: testOutOfOrderData issue: https://github.com/elastic/elasticsearch/issues/113477 +- class: org.elasticsearch.xpack.ml.integration.MlJobIT + method: testCreateJobsWithIndexNameOption + issue: https://github.com/elastic/elasticsearch/issues/113528 +- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT + method: test {p0=dot_prefix/10_basic/Deprecated index template with a dot prefix index pattern} + issue: https://github.com/elastic/elasticsearch/issues/113529 +- class: org.elasticsearch.xpack.esql.expression.function.fulltext.QueryStringFunctionTests + issue: https://github.com/elastic/elasticsearch/issues/113496 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates} + issue: https://github.com/elastic/elasticsearch/issues/113537 +- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT + method: test {p0=esql/70_locale/Date format with default locale} + issue: https://github.com/elastic/elasticsearch/issues/113539 +- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT + method: test {p0=esql/70_locale/Date format with Italian locale} + issue: https://github.com/elastic/elasticsearch/issues/113540 # Examples: # diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index d4ffad33d2314..37904e9f639ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.TimeValue; diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index f60a595d94c11..507fef10a5f44 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -159,7 +159,6 @@ exports org.elasticsearch.client.internal.support; exports org.elasticsearch.client.internal.transport; exports org.elasticsearch.cluster; - exports org.elasticsearch.cluster.ack; exports org.elasticsearch.cluster.action.index; exports org.elasticsearch.cluster.action.shard; exports org.elasticsearch.cluster.block; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 2cc50a85668c7..6b1d73a58c871 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -223,6 +223,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_AGGREGATION_OPERATOR_STATUS_FINISH_NANOS = def(8_747_00_0); public static final TransportVersion ML_TELEMETRY_MEMORY_ADDED = def(8_748_00_0); 
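For context on the TransportVersions hunk here: wire-format changes in this codebase are introduced by appending a new def(...) constant and then gating serialization on it, so that mixed-version clusters keep interoperating. A minimal, hypothetical sketch of that gating pattern follows (all names except the SEMANTIC_TEXT_SEARCH_INFERENCE_ID constant added just below are illustrative; the real application to InferenceFieldMetadata appears later in this patch):

    // Write side: the new field is sent only to peers that understand it.
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(alwaysPresentField);
        if (out.getTransportVersion().onOrAfter(TransportVersions.SEMANTIC_TEXT_SEARCH_INFERENCE_ID)) {
            out.writeString(newOptionalField); // older nodes never receive this
        }
    }

    // Read side: older streams fall back to a sensible default.
    public MySketch(StreamInput in) throws IOException {
        alwaysPresentField = in.readString();
        newOptionalField = in.getTransportVersion().onOrAfter(TransportVersions.SEMANTIC_TEXT_SEARCH_INFERENCE_ID)
            ? in.readString()
            : alwaysPresentField; // fallback for pre-upgrade peers
    }
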
public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_TOTAL_SHARDS_PER_NODE = def(8_749_00_0); + public static final TransportVersion SEMANTIC_TEXT_SEARCH_INFERENCE_ID = def(8_750_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java index 4a8a114aa7438..fb5267ba87b75 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java @@ -78,7 +78,7 @@ public static ComponentTemplate normalizeComponentTemplate( Settings.Builder builder = Settings.builder().put(template.settings()).normalizePrefix(IndexMetadata.INDEX_SETTING_PREFIX); Settings settings = builder.build(); indexScopedSettings.validate(settings, true); - template = new Template(settings, template.mappings(), template.aliases(), template.lifecycle()); + template = Template.builder(template).settings(settings).build(); componentTemplate = new ComponentTemplate( template, componentTemplate.version(), diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 967ec1529ae26..8d39644bbf5b2 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -165,7 +166,7 @@ public Builder setGlobalPipeline(String globalPipeline) { * * The default is to back off exponentially. 
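To make the retry-policy choice in this setBackoffPolicy javadoc concrete, here is a sketch of the stock factories on the relocated BackoffPolicy class (constantBackoff and exponentialBackoff are grounded in this patch; noBackoff and the delay/retry values are illustrative; assumes imports of org.elasticsearch.common.BackoffPolicy and org.elasticsearch.core.TimeValue):

    BackoffPolicy none   = BackoffPolicy.noBackoff();            // fail fast, no retries
    BackoffPolicy fixed  = BackoffPolicy.constantBackoff(        // 3 retries, 100ms apart
        TimeValue.timeValueMillis(100), 3);
    BackoffPolicy growth = BackoffPolicy.exponentialBackoff();   // the default described above

Any of these can be handed to the builder's setBackoffPolicy(...) to replace the default exponential behavior.
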
* - * @see org.elasticsearch.action.bulk.BackoffPolicy#exponentialBackoff() + * @see BackoffPolicy#exponentialBackoff() */ public Builder setBackoffPolicy(BackoffPolicy backoffPolicy) { if (backoffPolicy == null) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java index 9da9cac0712d1..c005799ac99c0 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.threadpool.Scheduler; import java.util.concurrent.CountDownLatch; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java index 574799881369e..4ebb5ecae8516 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index 321cc394ea809..f1aab9501051e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -16,13 +16,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Predicates; import org.elasticsearch.index.mapper.TimeSeriesParams; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.InstantiatingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ParserConstructor; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -46,18 +43,17 @@ */ public class FieldCapabilities implements Writeable, ToXContentObject { - private static final ParseField TYPE_FIELD = new ParseField("type"); - private static final ParseField IS_METADATA_FIELD = new ParseField("metadata_field"); - private static final ParseField SEARCHABLE_FIELD = new ParseField("searchable"); - private static final ParseField AGGREGATABLE_FIELD = new ParseField("aggregatable"); - private static final ParseField TIME_SERIES_DIMENSION_FIELD = new ParseField(TIME_SERIES_DIMENSION_PARAM); - private static final ParseField TIME_SERIES_METRIC_FIELD = new ParseField(TIME_SERIES_METRIC_PARAM); - private static final ParseField INDICES_FIELD = new ParseField("indices"); - private static final ParseField NON_SEARCHABLE_INDICES_FIELD = new ParseField("non_searchable_indices"); - private static final ParseField NON_AGGREGATABLE_INDICES_FIELD = new ParseField("non_aggregatable_indices"); - private static final ParseField NON_DIMENSION_INDICES_FIELD = new 
ParseField("non_dimension_indices"); - private static final ParseField METRIC_CONFLICTS_INDICES_FIELD = new ParseField("metric_conflicts_indices"); - private static final ParseField META_FIELD = new ParseField("meta"); + public static final ParseField TYPE_FIELD = new ParseField("type"); + public static final ParseField IS_METADATA_FIELD = new ParseField("metadata_field"); + public static final ParseField SEARCHABLE_FIELD = new ParseField("searchable"); + public static final ParseField AGGREGATABLE_FIELD = new ParseField("aggregatable"); + public static final ParseField TIME_SERIES_DIMENSION_FIELD = new ParseField(TIME_SERIES_DIMENSION_PARAM); + public static final ParseField TIME_SERIES_METRIC_FIELD = new ParseField(TIME_SERIES_METRIC_PARAM); + public static final ParseField INDICES_FIELD = new ParseField("indices"); + public static final ParseField NON_SEARCHABLE_INDICES_FIELD = new ParseField("non_searchable_indices"); + public static final ParseField NON_AGGREGATABLE_INDICES_FIELD = new ParseField("non_aggregatable_indices"); + public static final ParseField NON_DIMENSION_INDICES_FIELD = new ParseField("non_dimension_indices"); + public static final ParseField METRIC_CONFLICTS_INDICES_FIELD = new ParseField("metric_conflicts_indices"); private final String name; private final String type; @@ -312,37 +308,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static FieldCapabilities fromXContent(String name, XContentParser parser) throws IOException { - return PARSER.parse(parser, name); - } - - private static final InstantiatingObjectParser PARSER; - - static { - InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( - "field_capabilities", - true, - FieldCapabilities.class - ); - parser.declareString(ConstructingObjectParser.constructorArg(), TYPE_FIELD); - parser.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), IS_METADATA_FIELD); - parser.declareBoolean(ConstructingObjectParser.constructorArg(), SEARCHABLE_FIELD); - parser.declareBoolean(ConstructingObjectParser.constructorArg(), AGGREGATABLE_FIELD); - parser.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), TIME_SERIES_DIMENSION_FIELD); - parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TIME_SERIES_METRIC_FIELD); - parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), INDICES_FIELD); - parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_SEARCHABLE_INDICES_FIELD); - parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_AGGREGATABLE_INDICES_FIELD); - parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_DIMENSION_INDICES_FIELD); - parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), METRIC_CONFLICTS_INDICES_FIELD); - parser.declareObject( - ConstructingObjectParser.optionalConstructorArg(), - (p, context) -> p.map(HashMap::new, v -> Set.copyOf(v.list())), - META_FIELD - ); - PARSER = parser.build(); - } - /** * The name of the field. 
*/ diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java index cc2ea2a4ed57f..1153633ecf595 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java @@ -13,12 +13,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -28,8 +25,8 @@ public class FieldCapabilitiesFailure implements Writeable, ToXContentObject { - private static final ParseField INDICES_FIELD = new ParseField("indices"); - private static final ParseField FAILURE_FIELD = new ParseField("failure"); + public static final ParseField INDICES_FIELD = new ParseField("indices"); + public static final ParseField FAILURE_FIELD = new ParseField("failure"); private final List indices; private final Exception exception; @@ -58,30 +55,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "field_capabilities_failure", - true, - a -> { - return new FieldCapabilitiesFailure(((List) a[0]).toArray(String[]::new), (Exception) a[1]); - } - ); - - static { - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.currentToken(), p); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p); - Exception e = ElasticsearchException.failureFromXContent(p); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p); - return e; - }, FAILURE_FIELD); - } - - public static FieldCapabilitiesFailure fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(indices); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java index e5c2678bbf38e..e0f54aeef72ea 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java @@ -15,31 +15,25 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; -import 
org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.stream.Collectors; /** * Response for {@link FieldCapabilitiesRequest} requests. */ public class FieldCapabilitiesResponse extends ActionResponse implements ChunkedToXContentObject { - private static final ParseField INDICES_FIELD = new ParseField("indices"); - private static final ParseField FIELDS_FIELD = new ParseField("fields"); + public static final ParseField INDICES_FIELD = new ParseField("indices"); + public static final ParseField FIELDS_FIELD = new ParseField("fields"); private static final ParseField FAILED_INDICES_FIELD = new ParseField("failed_indices"); - private static final ParseField FAILURES_FIELD = new ParseField("failures"); + public static final ParseField FAILURES_FIELD = new ParseField("failures"); private final String[] indices; private final Map> responseMap; @@ -183,50 +177,6 @@ public Iterator toXContentChunked(ToXContent.Params params ); } - public static FieldCapabilitiesResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "field_capabilities_response", - true, - a -> { - Map> responseMap = ((List>>) a[0]).stream() - .collect(Collectors.toMap(Tuple::v1, Tuple::v2)); - List indices = a[1] == null ? Collections.emptyList() : (List) a[1]; - List failures = a[2] == null ? Collections.emptyList() : (List) a[2]; - return new FieldCapabilitiesResponse(indices.toArray(String[]::new), responseMap, failures); - } - ); - - static { - PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> { - Map typeToCapabilities = parseTypeToCapabilities(p, n); - return new Tuple<>(n, typeToCapabilities); - }, FIELDS_FIELD); - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), INDICES_FIELD); - PARSER.declareObjectArray( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> FieldCapabilitiesFailure.fromXContent(p), - FAILURES_FIELD - ); - } - - private static Map parseTypeToCapabilities(XContentParser parser, String name) throws IOException { - Map typeToCapabilities = new HashMap<>(); - - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - String type = parser.currentName(); - FieldCapabilities capabilities = FieldCapabilities.fromXContent(name, parser); - typeToCapabilities.put(type, capabilities); - } - return typeToCapabilities; - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 8e222e7197180..66434134fa69e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -310,7 +310,7 @@ private static void checkIndexBlocks(ClusterState clusterState, String[] 
concret } } - private void mergeIndexResponses( + private static void mergeIndexResponses( FieldCapabilitiesRequest request, CancellableTask task, Map indexResponses, @@ -564,7 +564,7 @@ boolean isEmpty() { private class NodeTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(FieldCapabilitiesNodeRequest request, TransportChannel channel, Task task) throws Exception { + public void messageReceived(FieldCapabilitiesNodeRequest request, TransportChannel channel, Task task) { assert task instanceof CancellableTask; final ActionListener listener = new ChannelActionListener<>(channel); ActionListener.completeWith(listener, () -> { diff --git a/server/src/main/java/org/elasticsearch/cluster/ack/ClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/cluster/ack/ClusterStateUpdateRequest.java deleted file mode 100644 index 8841b315b0138..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/ack/ClusterStateUpdateRequest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.ack; - -import org.elasticsearch.core.TimeValue; - -/** - * Base class to be used when needing to update the cluster state - * Contains the basic fields that are always needed - */ -public abstract class ClusterStateUpdateRequest> { - - private TimeValue ackTimeout; - private TimeValue masterNodeTimeout; - - /** - * Returns the maximum time interval to wait for acknowledgements - */ - public TimeValue ackTimeout() { - return ackTimeout; - } - - /** - * Sets the acknowledgement timeout - */ - @SuppressWarnings("unchecked") - public T ackTimeout(TimeValue ackTimeout) { - this.ackTimeout = ackTimeout; - return (T) this; - } - - /** - * Returns the maximum time interval to wait for the request to - * be completed on the master node - */ - public TimeValue masterNodeTimeout() { - return masterNodeTimeout; - } - - /** - * Sets the master node timeout - */ - @SuppressWarnings("unchecked") - public T masterNodeTimeout(TimeValue masterNodeTimeout) { - this.masterNodeTimeout = masterNodeTimeout; - return (T) this; - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/ack/IndicesClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/cluster/ack/IndicesClusterStateUpdateRequest.java deleted file mode 100644 index b1a52d50fd544..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/ack/IndicesClusterStateUpdateRequest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ -package org.elasticsearch.cluster.ack; - -import org.elasticsearch.index.Index; - -/** - * Base cluster state update request that allows to execute update against multiple indices - */ -public abstract class IndicesClusterStateUpdateRequest> extends ClusterStateUpdateRequest { - - private Index[] indices; - - /** - * Returns the indices the operation needs to be executed on - */ - public Index[] indices() { - return indices; - } - - /** - * Sets the indices the operation needs to be executed on - */ - @SuppressWarnings("unchecked") - public T indices(Index[] indices) { - this.indices = indices; - return (T) this; - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index ac3e85777f8fb..6d1a874e1c72b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -537,6 +537,11 @@ public Builder template(Template template) { return this; } + public Builder template(Template.Builder template) { + this.template = template.build(); + return this; + } + public Builder componentTemplates(List componentTemplates) { this.componentTemplates = componentTemplates; return this; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java index be0943f8f3066..271c60e829a87 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java @@ -23,6 +23,8 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.TransportVersions.SEMANTIC_TEXT_SEARCH_INFERENCE_ID; + /** * Contains inference field data for fields. 
* As inference is done in the coordinator node to avoid re-doing it at shard / replica level, the coordinator needs to check for the need @@ -32,21 +34,33 @@ */ public final class InferenceFieldMetadata implements SimpleDiffable, ToXContentFragment { private static final String INFERENCE_ID_FIELD = "inference_id"; + private static final String SEARCH_INFERENCE_ID_FIELD = "search_inference_id"; private static final String SOURCE_FIELDS_FIELD = "source_fields"; private final String name; private final String inferenceId; + private final String searchInferenceId; private final String[] sourceFields; public InferenceFieldMetadata(String name, String inferenceId, String[] sourceFields) { + this(name, inferenceId, inferenceId, sourceFields); + } + + public InferenceFieldMetadata(String name, String inferenceId, String searchInferenceId, String[] sourceFields) { this.name = Objects.requireNonNull(name); this.inferenceId = Objects.requireNonNull(inferenceId); + this.searchInferenceId = Objects.requireNonNull(searchInferenceId); this.sourceFields = Objects.requireNonNull(sourceFields); } public InferenceFieldMetadata(StreamInput input) throws IOException { this.name = input.readString(); this.inferenceId = input.readString(); + if (input.getTransportVersion().onOrAfter(SEMANTIC_TEXT_SEARCH_INFERENCE_ID)) { + this.searchInferenceId = input.readString(); + } else { + this.searchInferenceId = this.inferenceId; + } this.sourceFields = input.readStringArray(); } @@ -54,6 +68,9 @@ public InferenceFieldMetadata(StreamInput input) throws IOException { public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeString(inferenceId); + if (out.getTransportVersion().onOrAfter(SEMANTIC_TEXT_SEARCH_INFERENCE_ID)) { + out.writeString(searchInferenceId); + } out.writeStringArray(sourceFields); } @@ -64,12 +81,13 @@ public boolean equals(Object o) { InferenceFieldMetadata that = (InferenceFieldMetadata) o; return Objects.equals(name, that.name) && Objects.equals(inferenceId, that.inferenceId) + && Objects.equals(searchInferenceId, that.searchInferenceId) && Arrays.equals(sourceFields, that.sourceFields); } @Override public int hashCode() { - int result = Objects.hash(name, inferenceId); + int result = Objects.hash(name, inferenceId, searchInferenceId); result = 31 * result + Arrays.hashCode(sourceFields); return result; } @@ -82,6 +100,10 @@ public String getInferenceId() { return inferenceId; } + public String getSearchInferenceId() { + return searchInferenceId; + } + public String[] getSourceFields() { return sourceFields; } @@ -94,6 +116,9 @@ public static Diff readDiffFrom(StreamInput in) throws I public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(name); builder.field(INFERENCE_ID_FIELD, inferenceId); + if (searchInferenceId.equals(inferenceId) == false) { + builder.field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId); + } builder.array(SOURCE_FIELDS_FIELD, sourceFields); return builder.endObject(); } @@ -106,6 +131,7 @@ public static InferenceFieldMetadata fromXContent(XContentParser parser) throws String currentFieldName = null; String inferenceId = null; + String searchInferenceId = null; List inputFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -113,6 +139,8 @@ public static InferenceFieldMetadata fromXContent(XContentParser parser) throws } else if (token == XContentParser.Token.VALUE_STRING) { if 
(INFERENCE_ID_FIELD.equals(currentFieldName)) { inferenceId = parser.text(); + } else if (SEARCH_INFERENCE_ID_FIELD.equals(currentFieldName)) { + searchInferenceId = parser.text(); } } else if (token == XContentParser.Token.START_ARRAY) { if (SOURCE_FIELDS_FIELD.equals(currentFieldName)) { @@ -128,6 +156,11 @@ public static InferenceFieldMetadata fromXContent(XContentParser parser) throws parser.skipChildren(); } } - return new InferenceFieldMetadata(name, inferenceId, inputFields.toArray(String[]::new)); + return new InferenceFieldMetadata( + name, + inferenceId, + searchInferenceId == null ? inferenceId : searchInferenceId, + inputFields.toArray(String[]::new) + ); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 9888059af9686..1f9f6f636c1cf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -693,7 +693,7 @@ public static Map> v2TemplateOverlaps( private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexTemplate, ClusterState currentState) { // Workaround for the fact that start_time and end_time are injected by the MetadataCreateDataStreamService upon creation, // but when validating templates that create data streams the MetadataCreateDataStreamService isn't used. - var finalTemplate = Optional.ofNullable(indexTemplate.template()); + var finalTemplate = indexTemplate.template(); var finalSettings = Settings.builder(); final var now = Instant.now(); final var metadata = currentState.getMetadata(); @@ -717,18 +717,11 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT // Then apply setting from component templates: finalSettings.put(combinedSettings); // Then finally apply settings resolved from index template: - finalSettings.put(finalTemplate.map(Template::settings).orElse(Settings.EMPTY)); - - var templateToValidate = indexTemplate.toBuilder() - .template( - new Template( - finalSettings.build(), - finalTemplate.map(Template::mappings).orElse(null), - finalTemplate.map(Template::aliases).orElse(null), - finalTemplate.map(Template::lifecycle).orElse(null) - ) - ) - .build(); + if (finalTemplate != null && finalTemplate.settings() != null) { + finalSettings.put(finalTemplate.settings()); + } + + var templateToValidate = indexTemplate.toBuilder().template(Template.builder(finalTemplate).settings(finalSettings)).build(); validate(name, templateToValidate); validateDataStreamsStillReferenced(currentState, name, templateToValidate); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java index 3b8b89eb84a67..0a9e79284ced6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java @@ -291,4 +291,62 @@ static boolean mappingsEquals(CompressedXContent m1, CompressedXContent m2) { ); return Maps.deepEquals(thisUncompressedMapping, otherUncompressedMapping); } + + public static Builder builder() { + return new Builder(); + } + + public static Builder builder(@Nullable Template template) { + return template == null ? 
new Builder() : new Builder(template); + } + + public static class Builder { + private Settings settings = null; + private CompressedXContent mappings = null; + private Map aliases = null; + private DataStreamLifecycle lifecycle = null; + + private Builder() {} + + private Builder(Template template) { + settings = template.settings; + mappings = template.mappings; + aliases = template.aliases; + lifecycle = template.lifecycle; + } + + public Builder settings(Settings settings) { + this.settings = settings; + return this; + } + + public Builder settings(Settings.Builder settings) { + this.settings = settings.build(); + return this; + } + + public Builder mappings(CompressedXContent mappings) { + this.mappings = mappings; + return this; + } + + public Builder aliases(Map aliases) { + this.aliases = aliases; + return this; + } + + public Builder lifecycle(DataStreamLifecycle lifecycle) { + this.lifecycle = lifecycle; + return this; + } + + public Builder lifecycle(DataStreamLifecycle.Builder lifecycle) { + this.lifecycle = lifecycle.build(); + return this; + } + + public Template build() { + return new Template(settings, mappings, aliases, lifecycle); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java b/server/src/main/java/org/elasticsearch/common/BackoffPolicy.java similarity index 96% rename from server/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java rename to server/src/main/java/org/elasticsearch/common/BackoffPolicy.java index b9f89d4a65a01..27d98f9ade203 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BackoffPolicy.java +++ b/server/src/main/java/org/elasticsearch/common/BackoffPolicy.java @@ -6,7 +6,7 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.action.bulk; +package org.elasticsearch.common; import org.elasticsearch.core.TimeValue; @@ -15,8 +15,8 @@ import java.util.NoSuchElementException; /** - * Provides a backoff policy for bulk requests. Whenever a bulk request is rejected due to resource constraints (i.e. the client's internal - * thread pool is full), the backoff policy decides how long the bulk processor will wait before the operation is retried internally. + * Provides a set of generic backoff policies. Backoff policies are used to calculate the number of times an action will be retried + * and the intervals between those retries. 
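The rewritten javadoc above frames the policy as a source of retry counts and intervals; concretely, a BackoffPolicy is iterable, and its iterator yields one delay per permitted retry before reporting exhaustion (the deleted BackoffPolicyTests later in this patch exercises exactly this contract, plus BackoffPolicy.wrap). A minimal sketch, assuming imports of java.util.Iterator and org.elasticsearch.core.TimeValue:

    // One delay per permitted retry; exhausted once the retry budget is spent.
    BackoffPolicy policy = BackoffPolicy.constantBackoff(TimeValue.timeValueMillis(50), 3);
    Iterator<TimeValue> delays = policy.iterator();
    while (delays.hasNext()) {
        TimeValue delay = delays.next(); // 50ms, three times; then hasNext() is false
        // wait `delay`, retry the action, break on success (retry loop omitted)
    }
    // A further next() would throw NoSuchElementException, as the deleted test asserts.
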
* * Notes for implementing custom subclasses: * diff --git a/server/src/main/java/org/elasticsearch/index/query/Rewriteable.java b/server/src/main/java/org/elasticsearch/index/query/Rewriteable.java index b275603fff635..28a0fbc6b59f7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/Rewriteable.java +++ b/server/src/main/java/org/elasticsearch/index/query/Rewriteable.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.query; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.ParsingException; import java.io.IOException; import java.util.ArrayList; @@ -108,7 +107,7 @@ static > void rewriteAndFetch( } } rewriteResponse.onResponse(builder); - } catch (IOException | IllegalArgumentException | ParsingException ex) { + } catch (Exception ex) { rewriteResponse.onFailure(ex); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index fdacb4563ab87..528f0bd6dae08 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; @@ -21,6 +20,7 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java b/server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java index 7b5961c07ae04..e3ecc435b5a36 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java @@ -12,7 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; -import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java index cb5b8800916f6..b57f14f749b96 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -13,9 +13,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import 
org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java index 8bad8b5003bce..b86292be8e9ee 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java @@ -75,31 +75,35 @@ public void clusterChanged(ClusterChangedEvent event) { PersistentTasksCustomMetadata tasks = event.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE); PersistentTasksCustomMetadata previousTasks = event.previousState().getMetadata().custom(PersistentTasksCustomMetadata.TYPE); - // Cluster State Local State Local Action - // STARTED NULL Create as STARTED, Start - // STARTED STARTED Noop - running - // STARTED COMPLETED Noop - waiting for notification ack - // STARTED LOCAL_ABORTED Noop - waiting for notification ack - - // NULL NULL Noop - nothing to do - // NULL STARTED Remove locally, Mark as PENDING_CANCEL, Cancel - // NULL COMPLETED Remove locally - // NULL LOCAL_ABORTED Remove locally - - // Master states: - // NULL - doesn't exist in the cluster state - // STARTED - exist in the cluster state - - // Local state: - // NULL - we don't have task registered locally in runningTasks - // STARTED - registered in TaskManager, requires master notification when finishes - // PENDING_CANCEL - registered in TaskManager, doesn't require master notification when finishes - // COMPLETED - not registered in TaskManager, notified, waiting for master to remove it from CS so we can remove locally - // LOCAL_ABORTED - not registered in TaskManager, notified, waiting for master to adjust it in CS so we can remove locally - - // When task finishes if it is marked as STARTED or PENDING_CANCEL it is marked as COMPLETED and unregistered, - // If the task was STARTED, the master notification is also triggered (this is handled by unregisterTask() method, which is - // triggered by PersistentTaskListener + /* + * Master states: + * NULL - doesn't exist in the cluster state + * STARTED - exist in the cluster state + * + * Local states (see org.elasticsearch.persistent.AllocatedPersistentTask.State) + * NULL - we don't have task registered locally in runningTasks + * STARTED - registered in TaskManager, requires master notification when finishes + * PENDING_CANCEL - registered in TaskManager, doesn't require master notification when finishes + * COMPLETED - not registered in TaskManager, notified, waiting for master to remove it from CS so we can remove locally + * LOCAL_ABORTED - not registered in TaskManager, notified, waiting for master to adjust it in CS so we can remove locally + * + * Master state | Local state | Local action + * ---------------+----------------+----------------------------------------------- + * STARTED | NULL | Create as STARTED, Start + * STARTED | STARTED | Noop - running + * STARTED | PENDING_CANCEL | Impossible + * STARTED | COMPLETED | Noop - waiting for notification ack + * STARTED | LOCAL_ABORTED | Noop - waiting for notification ack + * NULL | NULL | Noop - nothing to do + * NULL | STARTED | Remove locally, Mark as PENDING_CANCEL, Cancel + * NULL | PENDING_CANCEL | Noop - will remove locally when complete + * NULL | COMPLETED | Remove locally + * NULL | LOCAL_ABORTED | 
Remove locally + * + * When task finishes if it is marked as STARTED or PENDING_CANCEL it is marked as COMPLETED and unregistered, + * If the task was STARTED, the master notification is also triggered (this is handled by unregisterTask() method, which is + * triggered by PersistentTaskListener + */ if (Objects.equals(tasks, previousTasks) == false || event.nodesChanged()) { // We have some changes let's check if they are related to our node diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 5a82211918356..9f943e63ef1e6 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -159,8 +159,8 @@ public static Optional buildSort(List> sortBuilde List sortFormats = new ArrayList<>(sortBuilders.size()); for (SortBuilder builder : sortBuilders) { SortFieldAndFormat sf = builder.build(context); - sortFields.add(sf.field); - sortFormats.add(sf.format); + sortFields.add(sf.field()); + sortFormats.add(sf.format()); } if (sortFields.isEmpty() == false) { // optimize if we just sort on score non reversed, we don't really diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java b/server/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java index 695895950e8fc..8c152ad1de2f3 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java @@ -13,10 +13,7 @@ import java.util.Objects; -public final class SortFieldAndFormat { - - public final SortField field; - public final DocValueFormat format; +public record SortFieldAndFormat(SortField field, DocValueFormat format) { public SortFieldAndFormat(SortField field, DocValueFormat format) { this.field = Objects.requireNonNull(field); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index ebf9f2e27118d..0c9833fad7640 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -14,12 +14,12 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.client.internal.Requests; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java deleted file mode 100644 index bda9bb337096d..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.action.bulk; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.ESTestCase; - -import java.util.Iterator; -import java.util.NoSuchElementException; -import java.util.concurrent.atomic.AtomicInteger; - -import static org.elasticsearch.core.TimeValue.timeValueMillis; - -public class BackoffPolicyTests extends ESTestCase { - public void testWrapBackoffPolicy() { - TimeValue timeValue = timeValueMillis(between(0, Integer.MAX_VALUE)); - int maxNumberOfRetries = between(1, 1000); - BackoffPolicy policy = BackoffPolicy.constantBackoff(timeValue, maxNumberOfRetries); - AtomicInteger retries = new AtomicInteger(); - policy = BackoffPolicy.wrap(policy, retries::getAndIncrement); - - int expectedRetries = 0; - { - // Fetching the iterator doesn't call the callback - Iterator itr = policy.iterator(); - assertEquals(expectedRetries, retries.get()); - - while (itr.hasNext()) { - // hasNext doesn't trigger the callback - assertEquals(expectedRetries, retries.get()); - // next does - itr.next(); - expectedRetries += 1; - assertEquals(expectedRetries, retries.get()); - } - // next doesn't call the callback when there isn't a backoff available - expectThrows(NoSuchElementException.class, () -> itr.next()); - assertEquals(expectedRetries, retries.get()); - } - { - // The second iterator also calls the callback - Iterator itr = policy.iterator(); - itr.next(); - expectedRetries += 1; - assertEquals(expectedRetries, retries.get()); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 5a71473e9b0ed..3be942bcd291e 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -147,13 +146,11 @@ public class BulkOperationTests extends ESTestCase { ComposableIndexTemplate.builder() .indexPatterns(List.of(dataStreamName)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, false)) - .template(new Template(null, null, null, null)) .build(), "ds-template-with-failure-store", ComposableIndexTemplate.builder() .indexPatterns(List.of(fsDataStreamName, fsRolloverDataStreamName)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, true)) - .template(new Template(null, null, null, null)) .build() ) ) diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java index 2ab51816043c0..24cc3265e0429 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; diff --git a/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index 461d8634fb56e..e6181a4ff9cb9 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index 1b2384b23e413..6ea4a1d3dc46b 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -119,7 +119,7 @@ public void testFailureParsing() throws IOException { ); FieldCapabilitiesResponse parsedResponse; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - parsedResponse = FieldCapabilitiesResponse.fromXContent(parser); + parsedResponse = FieldCapsUtils.parseFieldCapsResponse(parser); assertNull(parser.nextToken()); } assertNotSame(parsedResponse, randomResponse); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java index 27e36b6d35b7e..ed1af12965841 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java @@ -35,7 +35,7 @@ public class FieldCapabilitiesTests extends AbstractXContentSerializingTestCase< @Override protected FieldCapabilities doParseInstance(XContentParser parser) throws IOException { - return FieldCapabilities.fromXContent(FIELD_NAME, parser); + return FieldCapsUtils.parseFieldCaps(FIELD_NAME, parser); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java index 7b19f34cfe6cc..2059e9dd78b04 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java @@ -26,7 +26,7 @@ public class MergedFieldCapabilitiesResponseTests extends AbstractChunkedSeriali @Override protected FieldCapabilitiesResponse doParseInstance(XContentParser parser) throws IOException { - return FieldCapabilitiesResponse.fromXContent(parser); + return 
FieldCapsUtils.parseFieldCapsResponse(parser); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java index dd78b599bdb5a..e20788e341b7e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java @@ -154,45 +154,27 @@ public static ComponentTemplate mutateTemplate(ComponentTemplate orig) { Template ot = orig.template(); yield switch (randomIntBetween(0, 3)) { case 0 -> new ComponentTemplate( - new Template( - randomValueOtherThan(ot.settings(), ComponentTemplateTests::randomSettings), - ot.mappings(), - ot.aliases(), - ot.lifecycle() - ), + Template.builder(ot).settings(randomValueOtherThan(ot.settings(), ComponentTemplateTests::randomSettings)).build(), orig.version(), orig.metadata(), orig.deprecated() ); case 1 -> new ComponentTemplate( - new Template( - ot.settings(), - randomValueOtherThan(ot.mappings(), ComponentTemplateTests::randomMappings), - ot.aliases(), - ot.lifecycle() - ), + Template.builder(ot).mappings(randomValueOtherThan(ot.mappings(), ComponentTemplateTests::randomMappings)).build(), orig.version(), orig.metadata(), orig.deprecated() ); case 2 -> new ComponentTemplate( - new Template( - ot.settings(), - ot.mappings(), - randomValueOtherThan(ot.aliases(), ComponentTemplateTests::randomAliases), - ot.lifecycle() - ), + Template.builder(ot).aliases(randomValueOtherThan(ot.aliases(), ComponentTemplateTests::randomAliases)).build(), orig.version(), orig.metadata(), orig.deprecated() ); case 3 -> new ComponentTemplate( - new Template( - ot.settings(), - ot.mappings(), - ot.aliases(), - randomValueOtherThan(ot.lifecycle(), DataStreamLifecycleTests::randomLifecycle) - ), + Template.builder(ot) + .lifecycle(randomValueOtherThan(ot.lifecycle(), DataStreamLifecycleTests::randomLifecycle)) + .build(), orig.version(), orig.metadata(), orig.deprecated() diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java index 2cc5f509c3164..daa303440bcf4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java @@ -60,23 +60,23 @@ protected ComposableIndexTemplate createTestInstance() { } public static ComposableIndexTemplate randomInstance() { - Settings settings = null; - CompressedXContent mappings = null; - Map aliases = null; Template template = null; ComposableIndexTemplate.DataStreamTemplate dataStreamTemplate = randomDataStreamTemplate(); - + Template.Builder builder = Template.builder(); if (dataStreamTemplate != null || randomBoolean()) { if (randomBoolean()) { - settings = randomSettings(); + builder.settings(randomSettings()); } if (dataStreamTemplate != null || randomBoolean()) { - mappings = randomMappings(dataStreamTemplate); + builder.mappings(randomMappings(dataStreamTemplate)); } if (dataStreamTemplate == null && randomBoolean()) { - aliases = randomAliases(); + builder.aliases(randomAliases()); } - template = new Template(settings, mappings, aliases); + if (dataStreamTemplate != null && randomBoolean()) { + builder.lifecycle(DataStreamLifecycleTests.randomLifecycle()); + } + template = builder.build(); } Map meta = null; @@ -169,7 +169,12 
@@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori .template( randomValueOtherThan( orig.template(), - () -> new Template(randomSettings(), randomMappings(orig.getDataStreamTemplate()), randomAliases()) + () -> Template.builder() + .settings(randomSettings()) + .mappings(randomMappings(orig.getDataStreamTemplate())) + .aliases(randomAliases()) + .lifecycle(orig.getDataStreamTemplate() == null ? null : DataStreamLifecycleTests.randomLifecycle()) + .build() ) ) .build(); @@ -261,5 +266,9 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws public void testBuilderRoundtrip() { ComposableIndexTemplate template = randomInstance(); assertEquals(template, template.toBuilder().build()); + + if (template.template() != null) { + assertEquals(template.template(), Template.builder(template.template()).build()); + } } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleWithRetentionWarningsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleWithRetentionWarningsTests.java index 8d31904a88079..d7f10f484165b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleWithRetentionWarningsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleWithRetentionWarningsTests.java @@ -171,7 +171,7 @@ public void testValidateLifecycleIndexTemplateWithWarning() { Metadata.builder().build(), randomAlphaOfLength(10), ComposableIndexTemplate.builder() - .template(new Template(null, null, null, DataStreamLifecycle.DEFAULT)) + .template(Template.builder().lifecycle(DataStreamLifecycle.DEFAULT)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .indexPatterns(List.of(randomAlphaOfLength(10))) .build(), @@ -197,7 +197,7 @@ public void testValidateInternalDataStreamRetentionWithoutWarning() { Metadata.builder().build(), randomAlphaOfLength(10), ComposableIndexTemplate.builder() - .template(new Template(null, null, null, DataStreamLifecycle.DEFAULT)) + .template(Template.builder().lifecycle(DataStreamLifecycle.DEFAULT)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .indexPatterns(List.of("." 
+ randomAlphaOfLength(10))) .build(), @@ -220,16 +220,15 @@ public void testValidateLifecycleComponentTemplateWithWarning() { Map.of( "component-template", new ComponentTemplate( - new Template( - null, - null, - null, - new DataStreamLifecycle( - new DataStreamLifecycle.Retention(randomTimeValue(2, 100, TimeUnit.DAYS)), - null, - null + Template.builder() + .lifecycle( + new DataStreamLifecycle( + new DataStreamLifecycle.Retention(randomTimeValue(2, 100, TimeUnit.DAYS)), + null, + null + ) ) - ), + .build(), null, null ) @@ -238,7 +237,7 @@ public void testValidateLifecycleComponentTemplateWithWarning() { .build(), randomAlphaOfLength(10), ComposableIndexTemplate.builder() - .template(new Template(null, null, null, DataStreamLifecycle.DEFAULT)) + .template(Template.builder().lifecycle(DataStreamLifecycle.DEFAULT)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .indexPatterns(List.of(randomAlphaOfLength(10))) .componentTemplates(List.of("component-template")) @@ -291,12 +290,11 @@ public void testValidateLifecycleInComponentTemplate() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); HeaderWarning.setThreadContext(threadContext); - Template template = new Template( - ComponentTemplateTests.randomSettings(), - null, - ComponentTemplateTests.randomAliases(), - DataStreamLifecycle.DEFAULT - ); + Template template = Template.builder() + .settings(ComponentTemplateTests.randomSettings()) + .aliases(ComponentTemplateTests.randomAliases()) + .lifecycle(DataStreamLifecycle.DEFAULT) + .build(); ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>()); state = metadataIndexTemplateService.addComponentTemplate(state, false, "foo", componentTemplate); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java index 6107246cf8ff1..2d5805696320d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java @@ -61,13 +61,15 @@ protected boolean supportsUnknownFields() { private static InferenceFieldMetadata createTestItem() { String name = randomAlphaOfLengthBetween(3, 10); String inferenceId = randomIdentifier(); + String searchInferenceId = randomIdentifier(); String[] inputFields = generateRandomStringArray(5, 10, false, false); - return new InferenceFieldMetadata(name, inferenceId, inputFields); + return new InferenceFieldMetadata(name, inferenceId, searchInferenceId, inputFields); } public void testNullCtorArgsThrowException() { - assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, "inferenceId", new String[0])); - assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", null, new String[0])); - assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null)); + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, "inferenceId", "searchInferenceId", new String[0])); + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", null, "searchInferenceId", new String[0])); + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null, new String[0])); + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", "searchInferenceId", 
null)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index 7a1d4b5b1ddf4..5fadd8f263f7c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -545,14 +545,7 @@ public void testUpdateIndexTemplateV2() throws Exception { List patterns = new ArrayList<>(template.indexPatterns()); patterns.add("new-pattern"); - template = ComposableIndexTemplate.builder() - .indexPatterns(patterns) - .template(template.template()) - .componentTemplates(template.composedOf()) - .priority(template.priority()) - .version(template.version()) - .metadata(template.metadata()) - .build(); + template = template.toBuilder().indexPatterns(patterns).build(); state = metadataIndexTemplateService.addIndexTemplateV2(state, false, "foo", template); assertNotNull(state.metadata().templatesV2().get("foo")); @@ -1621,7 +1614,7 @@ private ClusterState addComponentTemplate( String name, DataStreamLifecycle lifecycle ) throws Exception { - ComponentTemplate ct = new ComponentTemplate(new Template(null, null, null, lifecycle), null, null); + ComponentTemplate ct = new ComponentTemplate(Template.builder().lifecycle(lifecycle).build(), null, null); return service.addComponentTemplate(state, true, name, ct); } @@ -1634,7 +1627,7 @@ private void assertLifecycleResolution( ) throws Exception { ComposableIndexTemplate it = ComposableIndexTemplate.builder() .indexPatterns(List.of(randomAlphaOfLength(10) + "*")) - .template(new Template(null, null, null, lifecycleZ)) + .template(Template.builder().lifecycle(lifecycleZ)) .componentTemplates(composeOf) .priority(0L) .version(1L) @@ -1858,7 +1851,7 @@ public void testIndexTemplateFailsToAdd() throws Exception { ClusterState state = ClusterState.EMPTY_STATE; ComponentTemplate ct = new ComponentTemplate( - new Template(null, null, null, DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build()), + Template.builder().lifecycle(DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999())).build(), null, null ); diff --git a/server/src/test/java/org/elasticsearch/common/BackoffPolicyTests.java b/server/src/test/java/org/elasticsearch/common/BackoffPolicyTests.java new file mode 100644 index 0000000000000..0cbbcdc0f1674 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/BackoffPolicyTests.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.common;
+
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.elasticsearch.core.TimeValue.timeValueMillis;
+
+public class BackoffPolicyTests extends ESTestCase {
+    public void testWrapBackoffPolicy() {
+        TimeValue timeValue = timeValueMillis(between(0, Integer.MAX_VALUE));
+        int maxNumberOfRetries = between(1, 1000);
+        BackoffPolicy policy = BackoffPolicy.constantBackoff(timeValue, maxNumberOfRetries);
+        AtomicInteger retries = new AtomicInteger();
+        policy = BackoffPolicy.wrap(policy, retries::getAndIncrement);
+
+        int expectedRetries = 0;
+        {
+            // Fetching the iterator doesn't call the callback
+            Iterator<TimeValue> itr = policy.iterator();
+            assertEquals(expectedRetries, retries.get());
+
+            while (itr.hasNext()) {
+                // hasNext doesn't trigger the callback
+                assertEquals(expectedRetries, retries.get());
+                // next does
+                itr.next();
+                expectedRetries += 1;
+                assertEquals(expectedRetries, retries.get());
+            }
+            // next doesn't call the callback when there isn't a backoff available
+            expectThrows(NoSuchElementException.class, () -> itr.next());
+            assertEquals(expectedRetries, retries.get());
+        }
+        {
+            // The second iterator also calls the callback
+            Iterator<TimeValue> itr = policy.iterator();
+            itr.next();
+            expectedRetries += 1;
+            assertEquals(expectedRetries, retries.get());
+        }
+    }
+
+    public void testExponentialBackOff() {
+        long initialDelayMillis = randomLongBetween(0, 100);
+        int maxNumberOfRetries = randomIntBetween(0, 10);
+        BackoffPolicy exponentialBackoff = BackoffPolicy.exponentialBackoff(timeValueMillis(initialDelayMillis), maxNumberOfRetries);
+        int numberOfBackoffsToPerform = randomIntBetween(1, 3);
+        for (int i = 0; i < numberOfBackoffsToPerform; i++) {
+            Iterator<TimeValue> iterator = exponentialBackoff.iterator();
+            TimeValue lastTimeValue = null;
+            int counter = 0;
+            while (iterator.hasNext()) {
+                TimeValue timeValue = iterator.next();
+                if (lastTimeValue == null) {
+                    assertEquals(timeValueMillis(initialDelayMillis), timeValue);
+                } else {
+                    // intervals should always be increasing
+                    assertTrue(timeValue.compareTo(lastTimeValue) > 0);
+                }
+                lastTimeValue = timeValue;
+                counter++;
+            }
+            assertEquals(maxNumberOfRetries, counter);
+        }
+    }
+
+    public void testNoBackoff() {
+        BackoffPolicy noBackoff = BackoffPolicy.noBackoff();
+        int numberOfBackoffsToPerform = randomIntBetween(1, 3);
+        for (int i = 0; i < numberOfBackoffsToPerform; i++) {
+            Iterator<TimeValue> iterator = noBackoff.iterator();
+            assertFalse(iterator.hasNext());
+        }
+    }
+
+    public void testConstantBackoff() {
+        long delayMillis = randomLongBetween(0, 100);
+        int maxNumberOfRetries = randomIntBetween(0, 10);
+        BackoffPolicy constantBackoff = BackoffPolicy.constantBackoff(timeValueMillis(delayMillis), maxNumberOfRetries);
+        int numberOfBackoffsToPerform = randomIntBetween(1, 3);
+        for (int i = 0; i < numberOfBackoffsToPerform; i++) {
+            final Iterator<TimeValue> iterator = constantBackoff.iterator();
+            int counter = 0;
+            while (iterator.hasNext()) {
+                TimeValue timeValue = iterator.next();
+                assertEquals(timeValueMillis(delayMillis), timeValue);
+                counter++;
+            }
+            assertEquals(maxNumberOfRetries, counter);
+        }
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
index 1b4a093dc70e4..e088e8569bf8a 100644
---
a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -820,7 +820,7 @@ private static TopFieldDocs search( Query query = new BooleanQuery.Builder().add(queryBuilder.toQuery(searchExecutionContext), Occur.MUST) .add(Queries.newNonNestedFilter(searchExecutionContext.indexVersionCreated()), Occur.FILTER) .build(); - Sort sort = new Sort(sortBuilder.build(searchExecutionContext).field); + Sort sort = new Sort(sortBuilder.build(searchExecutionContext).field()); return searcher.search(query, 10, sort); } diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 583cdf302ad65..dc1f12d6cf657 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -152,7 +152,7 @@ public void testBuildSortField() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { T sortBuilder = createTestItem(); SortFieldAndFormat sortField = Rewriteable.rewrite(sortBuilder, mockShardContext).build(mockShardContext); - sortFieldAssertions(sortBuilder, sortField.field, sortField.format); + sortFieldAssertions(sortBuilder, sortField.field(), sortField.format()); } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 433a1d8eaf2f4..5f08a3f1143e0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -150,26 +150,26 @@ protected void sortFieldAssertions(FieldSortBuilder builder, SortField sortField public void testBuildSortFieldMissingValue() throws IOException { SearchExecutionContext searchExecutionContext = createMockSearchExecutionContext(); FieldSortBuilder fieldSortBuilder = new FieldSortBuilder("value").missing("_first"); - SortField sortField = fieldSortBuilder.build(searchExecutionContext).field; + SortField sortField = fieldSortBuilder.build(searchExecutionContext).field(); SortedNumericSortField expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE); expectedSortField.setMissingValue(Double.NEGATIVE_INFINITY); assertEquals(expectedSortField, sortField); fieldSortBuilder = new FieldSortBuilder("value").missing("_last"); - sortField = fieldSortBuilder.build(searchExecutionContext).field; + sortField = fieldSortBuilder.build(searchExecutionContext).field(); expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE); expectedSortField.setMissingValue(Double.POSITIVE_INFINITY); assertEquals(expectedSortField, sortField); Double randomDouble = randomDouble(); fieldSortBuilder = new FieldSortBuilder("value").missing(randomDouble); - sortField = fieldSortBuilder.build(searchExecutionContext).field; + sortField = fieldSortBuilder.build(searchExecutionContext).field(); expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE); expectedSortField.setMissingValue(randomDouble); assertEquals(expectedSortField, sortField); fieldSortBuilder = new FieldSortBuilder("value").missing(randomDouble.toString()); - sortField = fieldSortBuilder.build(searchExecutionContext).field; + sortField = fieldSortBuilder.build(searchExecutionContext).field(); 
expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE); expectedSortField.setMissingValue(randomDouble); assertEquals(expectedSortField, sortField); @@ -181,19 +181,19 @@ public void testBuildSortFieldMissingValue() throws IOException { public void testBuildSortFieldOrder() throws IOException { SearchExecutionContext searchExecutionContext = createMockSearchExecutionContext(); FieldSortBuilder fieldSortBuilder = new FieldSortBuilder("value"); - SortField sortField = fieldSortBuilder.build(searchExecutionContext).field; + SortField sortField = fieldSortBuilder.build(searchExecutionContext).field(); SortedNumericSortField expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE, false); expectedSortField.setMissingValue(Double.POSITIVE_INFINITY); assertEquals(expectedSortField, sortField); fieldSortBuilder = new FieldSortBuilder("value").order(SortOrder.ASC); - sortField = fieldSortBuilder.build(searchExecutionContext).field; + sortField = fieldSortBuilder.build(searchExecutionContext).field(); expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE, false); expectedSortField.setMissingValue(Double.POSITIVE_INFINITY); assertEquals(expectedSortField, sortField); fieldSortBuilder = new FieldSortBuilder("value").order(SortOrder.DESC); - sortField = fieldSortBuilder.build(searchExecutionContext).field; + sortField = fieldSortBuilder.build(searchExecutionContext).field(); expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE, true, SortedNumericSelector.Type.MAX); expectedSortField.setMissingValue(Double.NEGATIVE_INFINITY); assertEquals(expectedSortField, sortField); @@ -206,44 +206,44 @@ public void testMultiValueMode() throws IOException { SearchExecutionContext searchExecutionContext = createMockSearchExecutionContext(); FieldSortBuilder sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.MIN); - SortField sortField = sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField, instanceOf(SortedNumericSortField.class)); SortedNumericSortField numericSortField = (SortedNumericSortField) sortField; assertEquals(SortedNumericSelector.Type.MIN, numericSortField.getSelector()); sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.MAX); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField, instanceOf(SortedNumericSortField.class)); numericSortField = (SortedNumericSortField) sortField; assertEquals(SortedNumericSelector.Type.MAX, numericSortField.getSelector()); sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.SUM); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.SUM, comparatorSource.sortMode()); sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.AVG); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.AVG, comparatorSource.sortMode()); 
sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.MEDIAN); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MEDIAN, comparatorSource.sortMode()); // sort mode should also be set by build() implicitly to MIN or MAX if not set explicitly on builder sortBuilder = new FieldSortBuilder("value"); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField, instanceOf(SortedNumericSortField.class)); numericSortField = (SortedNumericSortField) sortField; assertEquals(SortedNumericSelector.Type.MIN, numericSortField.getSelector()); sortBuilder = new FieldSortBuilder("value").order(SortOrder.DESC); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField, instanceOf(SortedNumericSortField.class)); numericSortField = (SortedNumericSortField) sortField; assertEquals(SortedNumericSelector.Type.MAX, numericSortField.getSelector()); @@ -258,7 +258,7 @@ public void testBuildNested() throws IOException { FieldSortBuilder sortBuilder = new FieldSortBuilder("fieldName").setNestedSort( new NestedSortBuilder("path").setFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value")) ); - SortField sortField = sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); Nested nested = comparatorSource.nested(); @@ -267,7 +267,7 @@ public void testBuildNested() throws IOException { NestedSortBuilder nestedSort = new NestedSortBuilder("path"); sortBuilder = new FieldSortBuilder("fieldName").setNestedSort(nestedSort); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); @@ -276,7 +276,7 @@ public void testBuildNested() throws IOException { nestedSort.setFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value")); sortBuilder = new FieldSortBuilder("fieldName").setNestedSort(nestedSort); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); @@ -308,27 +308,27 @@ public void testShardDocSort() throws IOException { reverse ? 
SortOrder.DESC : SortOrder.ASC ); SortFieldAndFormat sortAndFormat = sortBuilder.build(searchExecutionContext); - assertThat(sortAndFormat.field.getClass(), equalTo(ShardDocSortField.class)); - ShardDocSortField sortField = (ShardDocSortField) sortAndFormat.field; + assertThat(sortAndFormat.field().getClass(), equalTo(ShardDocSortField.class)); + ShardDocSortField sortField = (ShardDocSortField) sortAndFormat.field(); assertThat(sortField.getShardRequestIndex(), equalTo(searchExecutionContext.getShardRequestIndex())); assertThat(sortField.getReverse(), equalTo(reverse)); - assertThat(sortAndFormat.format, equalTo(DocValueFormat.RAW)); + assertThat(sortAndFormat.format(), equalTo(DocValueFormat.RAW)); } public void testFormatDateTime() throws Exception { SearchExecutionContext searchExecutionContext = createMockSearchExecutionContext(); SortFieldAndFormat sortAndFormat = SortBuilders.fieldSort("custom-date").build(searchExecutionContext); - assertThat(sortAndFormat.format.formatSortValue(1615580798601L), equalTo(1615580798601L)); + assertThat(sortAndFormat.format().formatSortValue(1615580798601L), equalTo(1615580798601L)); sortAndFormat = SortBuilders.fieldSort("custom-date").setFormat("yyyy-MM-dd").build(searchExecutionContext); - assertThat(sortAndFormat.format.formatSortValue(1615580798601L), equalTo("2021-03-12")); + assertThat(sortAndFormat.format().formatSortValue(1615580798601L), equalTo("2021-03-12")); sortAndFormat = SortBuilders.fieldSort("custom-date").setFormat("epoch_millis").build(searchExecutionContext); - assertThat(sortAndFormat.format.formatSortValue(1615580798601L), equalTo("1615580798601")); + assertThat(sortAndFormat.format().formatSortValue(1615580798601L), equalTo("1615580798601")); sortAndFormat = SortBuilders.fieldSort("custom-date").setFormat("yyyy/MM/dd HH:mm:ss").build(searchExecutionContext); - assertThat(sortAndFormat.format.formatSortValue(1615580798601L), equalTo("2021/03/12 20:26:38")); + assertThat(sortAndFormat.format().formatSortValue(1615580798601L), equalTo("2021/03/12 20:26:38")); } public void testInvalidFormat() { @@ -371,12 +371,12 @@ public void testModeNonNumericField() throws IOException { SearchExecutionContext searchExecutionContext = createMockSearchExecutionContext(); FieldSortBuilder sortBuilder = new FieldSortBuilder(MAPPED_STRING_FIELDNAME).sortMode(SortMode.MIN); - SortField sortField = sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField, instanceOf(SortedSetSortField.class)); assertEquals(SortedSetSelector.Type.MIN, ((SortedSetSortField) sortField).getSelector()); sortBuilder = new FieldSortBuilder(MAPPED_STRING_FIELDNAME).sortMode(SortMode.MAX); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField, instanceOf(SortedSetSortField.class)); assertEquals(SortedSetSelector.Type.MAX, ((SortedSetSortField) sortField).getSelector()); diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 18f63821e721b..17a9fb5974176 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -367,32 +367,32 @@ public void testCommonCaseIsOptimized() throws IOException { // The common case should use 
LatLonDocValuesField.newDistanceSort GeoDistanceSortBuilder builder = new GeoDistanceSortBuilder("", new GeoPoint(3.5, 2.1)); SortFieldAndFormat sort = builder.build(context); - assertEquals(LatLonDocValuesField.newDistanceSort("random_field_name", 3.5, 2.1).getClass(), sort.field.getClass()); + assertEquals(LatLonDocValuesField.newDistanceSort("random_field_name", 3.5, 2.1).getClass(), sort.field().getClass()); // however this might be disabled by fancy options builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1), new GeoPoint(3.0, 4)); sort = builder.build(context); - assertEquals(SortField.class, sort.field.getClass()); // 2 points -> plain SortField with a custom comparator + assertEquals(SortField.class, sort.field().getClass()); // 2 points -> plain SortField with a custom comparator builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1)); builder.unit(DistanceUnit.KILOMETERS); sort = builder.build(context); - assertEquals(SortField.class, sort.field.getClass()); // km rather than m -> plain SortField with a custom comparator + assertEquals(SortField.class, sort.field().getClass()); // km rather than m -> plain SortField with a custom comparator builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1)); builder.order(SortOrder.DESC); sort = builder.build(context); - assertEquals(SortField.class, sort.field.getClass()); // descending means the max value should be considered rather than min + assertEquals(SortField.class, sort.field().getClass()); // descending means the max value should be considered rather than min builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1)); builder.setNestedSort(new NestedSortBuilder("path")); sort = builder.build(context); - assertEquals(SortField.class, sort.field.getClass()); // can't use LatLon optimized sorting with nested fields + assertEquals(SortField.class, sort.field().getClass()); // can't use LatLon optimized sorting with nested fields builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1)); builder.order(SortOrder.DESC); sort = builder.build(context); - assertEquals(SortField.class, sort.field.getClass()); // can't use LatLon optimized sorting with DESC sorting + assertEquals(SortField.class, sort.field().getClass()); // can't use LatLon optimized sorting with DESC sorting } /** @@ -401,13 +401,13 @@ public void testCommonCaseIsOptimized() throws IOException { public void testBuildSortFieldOrder() throws IOException { SearchExecutionContext searchExecutionContext = createMockSearchExecutionContext(); GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0); - assertEquals(false, geoDistanceSortBuilder.build(searchExecutionContext).field.getReverse()); + assertEquals(false, geoDistanceSortBuilder.build(searchExecutionContext).field().getReverse()); geoDistanceSortBuilder.order(SortOrder.ASC); - assertEquals(false, geoDistanceSortBuilder.build(searchExecutionContext).field.getReverse()); + assertEquals(false, geoDistanceSortBuilder.build(searchExecutionContext).field().getReverse()); geoDistanceSortBuilder.order(SortOrder.DESC); - assertEquals(true, geoDistanceSortBuilder.build(searchExecutionContext).field.getReverse()); + assertEquals(true, geoDistanceSortBuilder.build(searchExecutionContext).field().getReverse()); } /** @@ -417,7 +417,7 @@ public void testMultiValueMode() throws IOException { SearchExecutionContext searchExecutionContext = 
createMockSearchExecutionContext(); GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0); geoDistanceSortBuilder.sortMode(SortMode.MAX); - SortField sortField = geoDistanceSortBuilder.build(searchExecutionContext).field; + SortField sortField = geoDistanceSortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MAX, comparatorSource.sortMode()); @@ -425,7 +425,7 @@ public void testMultiValueMode() throws IOException { // also use MultiValueMode.Max if no Mode set but order is DESC geoDistanceSortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0); geoDistanceSortBuilder.order(SortOrder.DESC); - sortField = geoDistanceSortBuilder.build(searchExecutionContext).field; + sortField = geoDistanceSortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MAX, comparatorSource.sortMode()); @@ -434,7 +434,7 @@ public void testMultiValueMode() throws IOException { geoDistanceSortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0); // need to use distance unit other than Meters to not get back a LatLonPointSortField geoDistanceSortBuilder.order(SortOrder.ASC).unit(DistanceUnit.INCH); - sortField = geoDistanceSortBuilder.build(searchExecutionContext).field; + sortField = geoDistanceSortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MIN, comparatorSource.sortMode()); @@ -442,19 +442,19 @@ public void testMultiValueMode() throws IOException { geoDistanceSortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0); // need to use distance unit other than Meters to not get back a LatLonPointSortField geoDistanceSortBuilder.sortMode(SortMode.MIN).unit(DistanceUnit.INCH); - sortField = geoDistanceSortBuilder.build(searchExecutionContext).field; + sortField = geoDistanceSortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MIN, comparatorSource.sortMode()); geoDistanceSortBuilder.sortMode(SortMode.AVG); - sortField = geoDistanceSortBuilder.build(searchExecutionContext).field; + sortField = geoDistanceSortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.AVG, comparatorSource.sortMode()); geoDistanceSortBuilder.sortMode(SortMode.MEDIAN); - sortField = geoDistanceSortBuilder.build(searchExecutionContext).field; + sortField = geoDistanceSortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MEDIAN, comparatorSource.sortMode()); @@ -469,7 +469,7 @@ public void testBuildNested() throws IOException { GeoDistanceSortBuilder 
sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0).setNestedSort( new NestedSortBuilder("path").setFilter(QueryBuilders.matchAllQuery()) ); - SortField sortField = sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); Nested nested = comparatorSource.nested(); @@ -477,7 +477,7 @@ public void testBuildNested() throws IOException { assertEquals(new MatchAllDocsQuery(), nested.getInnerQuery()); sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0).setNestedSort(new NestedSortBuilder("path")); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); @@ -487,7 +487,7 @@ public void testBuildNested() throws IOException { sortBuilder = new GeoDistanceSortBuilder("fieldName", 1.0, 1.0).setNestedSort( new NestedSortBuilder("path").setFilter(QueryBuilders.matchAllQuery()) ); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); @@ -504,7 +504,7 @@ public void testBuildCoerce() throws IOException { sortBuilder.validation(GeoValidationMethod.COERCE); assertEquals(-180.0, sortBuilder.points()[0].getLat(), 0.0); assertEquals(-360.0, sortBuilder.points()[0].getLon(), 0.0); - SortField sortField = sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertEquals(LatLonDocValuesField.newDistanceSort("fieldName", 0.0, 180.0), sortField); } diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 98345d0cb4edd..872775e18c7d1 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -273,7 +273,7 @@ public void testMultiValueMode() throws IOException { for (SortMode mode : SortMode.values()) { ScriptSortBuilder sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER); sortBuilder.sortMode(mode); - SortField sortField = sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.fromString(mode.toString()), comparatorSource.sortMode()); @@ -282,14 +282,14 @@ public void testMultiValueMode() throws IOException { // check that without mode set, order ASC sets mode to MIN, DESC to MAX ScriptSortBuilder sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER); sortBuilder.order(SortOrder.ASC); - SortField sortField = 
sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MIN, comparatorSource.sortMode()); sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER); sortBuilder.order(SortOrder.DESC); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MAX, comparatorSource.sortMode()); @@ -300,15 +300,15 @@ public void testMultiValueMode() throws IOException { */ public void testBuildCorrectComparatorType() throws IOException { ScriptSortBuilder sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.STRING); - SortField sortField = sortBuilder.build(createMockSearchExecutionContext()).field; + SortField sortField = sortBuilder.build(createMockSearchExecutionContext()).field(); assertThat(sortField.getComparatorSource(), instanceOf(BytesRefFieldComparatorSource.class)); sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER); - sortField = sortBuilder.build(createMockSearchExecutionContext()).field; + sortField = sortBuilder.build(createMockSearchExecutionContext()).field(); assertThat(sortField.getComparatorSource(), instanceOf(DoubleValuesComparatorSource.class)); sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.VERSION); - sortField = sortBuilder.build(createMockSearchExecutionContext()).field; + sortField = sortBuilder.build(createMockSearchExecutionContext()).field(); assertThat(sortField.getComparatorSource(), instanceOf(BytesRefFieldComparatorSource.class)); } @@ -321,7 +321,7 @@ public void testBuildNested() throws IOException { ScriptSortBuilder sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER).setNestedSort( new NestedSortBuilder("path").setFilter(QueryBuilders.matchAllQuery()) ); - SortField sortField = sortBuilder.build(searchExecutionContext).field; + SortField sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); Nested nested = comparatorSource.nested(); @@ -331,7 +331,7 @@ public void testBuildNested() throws IOException { sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER).setNestedSort( new NestedSortBuilder("path") ); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = sortBuilder.build(searchExecutionContext).field(); assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class)); comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); nested = comparatorSource.nested(); @@ -341,7 +341,7 @@ public void testBuildNested() throws IOException { sortBuilder = new ScriptSortBuilder(mockScript(MOCK_SCRIPT_NAME), ScriptSortType.NUMBER).setNestedSort( new NestedSortBuilder("path").setFilter(QueryBuilders.matchAllQuery()) ); - sortField = sortBuilder.build(searchExecutionContext).field; + sortField = 
sortBuilder.build(searchExecutionContext).field();
         assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class));
         comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource();
         nested = comparatorSource.nested();
diff --git a/test/framework/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapsUtils.java b/test/framework/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapsUtils.java
new file mode 100644
index 0000000000000..84c057d3b6a81
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapsUtils.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+package org.elasticsearch.action.fieldcaps;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.core.Tuple;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.InstantiatingObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Utilities for parsing field_caps responses for test purposes.
+ */
+public enum FieldCapsUtils {
+    ;
+
+    @SuppressWarnings("unchecked")
+    private static final ConstructingObjectParser<FieldCapabilitiesFailure, Void> FAILURE_PARSER = new ConstructingObjectParser<>(
+        "field_capabilities_failure",
+        true,
+        a -> new FieldCapabilitiesFailure(((List<String>) a[0]).toArray(String[]::new), (Exception) a[1])
+    );
+
+    static {
+        FAILURE_PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), FieldCapabilitiesFailure.INDICES_FIELD);
+        FAILURE_PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> {
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.currentToken(), p);
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p);
+            Exception e = ElasticsearchException.failureFromXContent(p);
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p);
+            return e;
+        }, FieldCapabilitiesFailure.FAILURE_FIELD);
+    }
+
+    public static FieldCapabilitiesFailure parseFailure(XContentParser parser) throws IOException {
+        return FAILURE_PARSER.parse(parser, null);
+    }
+
+    @SuppressWarnings("unchecked")
+    private static final ConstructingObjectParser<FieldCapabilitiesResponse, Void> PARSER = new ConstructingObjectParser<>(
+        "field_capabilities_response",
+        true,
+        a -> {
+            Map<String, Map<String, FieldCapabilities>> responseMap = ((List<Tuple<String, Map<String, FieldCapabilities>>>) a[0])
+                .stream()
+                .collect(Collectors.toMap(Tuple::v1, Tuple::v2));
+            List<String> indices = a[1] == null ? Collections.emptyList() : (List<String>) a[1];
+            List<FieldCapabilitiesFailure> failures = a[2] == null ? Collections.emptyList() : (List<FieldCapabilitiesFailure>) a[2];
+            return new FieldCapabilitiesResponse(indices.toArray(String[]::new), responseMap, failures);
+        }
+    );
+
+    static {
+        PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> {
+            Map<String, FieldCapabilities> typeToCapabilities = parseTypeToCapabilities(p, n);
+            return new Tuple<>(n, typeToCapabilities);
+        }, FieldCapabilitiesResponse.FIELDS_FIELD);
+        PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilitiesResponse.INDICES_FIELD);
+        PARSER.declareObjectArray(
+            ConstructingObjectParser.optionalConstructorArg(),
+            (p, c) -> parseFailure(p),
+            FieldCapabilitiesResponse.FAILURES_FIELD
+        );
+    }
+
+    public static FieldCapabilitiesResponse parseFieldCapsResponse(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    private static Map<String, FieldCapabilities> parseTypeToCapabilities(XContentParser parser, String name) throws IOException {
+        Map<String, FieldCapabilities> typeToCapabilities = new HashMap<>();
+
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        XContentParser.Token token;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
+            String type = parser.currentName();
+            FieldCapabilities capabilities = parseFieldCaps(name, parser);
+            typeToCapabilities.put(type, capabilities);
+        }
+        return typeToCapabilities;
+    }
+
+    public static FieldCapabilities parseFieldCaps(String name, XContentParser parser) throws IOException {
+        return FIELD_CAPS_PARSER.parse(parser, name);
+    }
+
+    private static final InstantiatingObjectParser<FieldCapabilities, String> FIELD_CAPS_PARSER;
+
+    static {
+        InstantiatingObjectParser.Builder<FieldCapabilities> parser = InstantiatingObjectParser.builder(
+            "field_capabilities",
+            true,
+            FieldCapabilities.class
+        );
+        parser.declareString(ConstructingObjectParser.constructorArg(), FieldCapabilities.TYPE_FIELD);
+        parser.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.IS_METADATA_FIELD);
+        parser.declareBoolean(ConstructingObjectParser.constructorArg(), FieldCapabilities.SEARCHABLE_FIELD);
+        parser.declareBoolean(ConstructingObjectParser.constructorArg(), FieldCapabilities.AGGREGATABLE_FIELD);
+        parser.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.TIME_SERIES_DIMENSION_FIELD);
+        parser.declareString(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.TIME_SERIES_METRIC_FIELD);
+        parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.INDICES_FIELD);
+        parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.NON_SEARCHABLE_INDICES_FIELD);
+        parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.NON_AGGREGATABLE_INDICES_FIELD);
+        parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.NON_DIMENSION_INDICES_FIELD);
+        parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FieldCapabilities.METRIC_CONFLICTS_INDICES_FIELD);
+        parser.declareObject(
+            ConstructingObjectParser.optionalConstructorArg(),
+            (p, context) -> p.map(HashMap::new, v -> Set.copyOf(v.list())),
+            new ParseField("meta")
+        );
+        FIELD_CAPS_PARSER = parser.build();
+    }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index
5c25e0cc3b0d9..6ed0a1dfe0229 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.fieldcaps.FieldCapsUtils; import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -2458,7 +2459,7 @@ protected FieldCapabilitiesResponse fieldCaps( Response response = restClient.performRequest(request); assertOK(response); try (XContentParser parser = responseAsParser(response)) { - return FieldCapabilitiesResponse.fromXContent(parser); + return FieldCapsUtils.parseFieldCapsResponse(parser); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java index ffadf4cafaf12..febe6e97a12aa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java @@ -29,21 +29,30 @@ *

 * The word "total" in an attribute name indicates that the attribute is a sum across all nodes.
 *
- * @param currentTotalNodes the count of nodes that are currently in the cluster
- * @param currentPerNodeMemoryBytes the minimum size (memory) of all nodes in the cluster
- * @param currentTotalModelMemoryBytes the sum of model memory over every assignment/deployment
- * @param currentTotalProcessorsInUse the sum of processors used over every assignment/deployment
- * @param currentPerNodeMemoryOverheadBytes always equal to MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD
- * @param wantedMinNodes the minimum number of nodes that must be provided by the autoscaler
- * @param wantedExtraPerNodeMemoryBytes the amount of additional memory that must be provided on every node
- *                                      (this value must be >0 to trigger a scale up based on memory)
- * @param wantedExtraPerNodeNodeProcessors the number of additional processors that must be provided on every node
- *                                         (this value must be >0 to trigger a scale up based on processors)
- * @param wantedExtraModelMemoryBytes the amount of additional model memory that is newly required
- *                                    (due to a new assignment/deployment)
- * @param wantedExtraProcessors the number of additional processors that are required to be added to the cluster
- * @param unwantedNodeMemoryBytesToRemove the amount of memory that should be removed from the cluster. If this is equal to the amount of
- *                                        memory provided by a node, a node will be removed.
+ * @param currentTotalNodes The count of nodes that are currently in the cluster,
+ *                          used to confirm that both sides have the same view of the current state.
+ * @param currentPerNodeMemoryBytes The minimum size (memory) of all nodes in the cluster,
+ *                                  used to confirm that both sides have the same view of the current state.
+ * @param currentTotalModelMemoryBytes The sum of model memory over every assignment/deployment, used to calculate requirements
+ * @param currentTotalProcessorsInUse The sum of processors used over every assignment/deployment, not used by the autoscaler
+ * @param currentPerNodeMemoryOverheadBytes Always equal to MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.
+ * @param wantedMinNodes The minimum number of nodes that must be provided by the autoscaler
+ * @param wantedExtraPerNodeMemoryBytes If there are jobs or trained models that have been started but cannot be allocated on the
+ *                                      ML nodes currently within the cluster then this will be the *max* of the ML native memory
+ *                                      requirements of those jobs/trained models. The metric is in terms of ML native memory,
+ *                                      not container memory.
+ * @param wantedExtraPerNodeNodeProcessors If there are trained model allocations that have been started but cannot be allocated on the
+ *                                         ML nodes currently within the cluster then this will be the *max* of the vCPU requirements of
+ *                                         those allocations. Zero otherwise.
+ * @param wantedExtraModelMemoryBytes If there are jobs or trained models that have been started but cannot be allocated on the ML
+ *                                    nodes currently within the cluster then this will be the *sum* of the ML native memory
+ *                                    requirements of those jobs/trained models. The metric is in terms of ML native memory,
+ *                                    not container memory.
+ * @param wantedExtraProcessors If there are trained model allocations that have been started but cannot be allocated on the
+ *                              ML nodes currently within the cluster then this will be the *sum* of the vCPU requirements
+ *                              of those allocations. Zero otherwise.
+ * @param unwantedNodeMemoryBytesToRemove The size of the ML node to be removed, in GB rounded to the nearest GB, + * or zero if no nodes could be removed. */ public record MlAutoscalingStats( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index d7b9f9ddd5b58..8e1bc7af1bdc8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -1341,7 +1341,8 @@ public void testHasPrivilegesOtherThanIndex() { || roleDescriptor.hasConfigurableClusterPrivileges() || roleDescriptor.hasApplicationPrivileges() || roleDescriptor.hasRunAs() - || roleDescriptor.hasRemoteIndicesPrivileges(); + || roleDescriptor.hasRemoteIndicesPrivileges() + || roleDescriptor.hasWorkflowsRestriction(); assertThat(roleDescriptor.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), equalTo(expected)); } diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java index aea4a06411e4e..64fb9e8f85b9b 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDriver.java @@ -144,7 +144,7 @@ private static void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings == null ? 
null : mappings, null, lifecycle)) + .template(Template.builder().settings(settings).mappings(mappings).lifecycle(lifecycle)) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match.csv-spec deleted file mode 100644 index 2bc2a865c0052..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match.csv-spec +++ /dev/null @@ -1,47 +0,0 @@ -matchKeywordField -required_capability: match_command - -from books | match "author.keyword: *Stein*" | keep book_no, author | sort book_no; - -book_no:keyword | author:text -7381 | Bettilu Stein Faulkner -; - -matchMultipleTextFields -required_capability: match_command - -from books | match "title:Return* AND author:*Tolkien" | keep book_no, title | sort book_no; - -book_no:keyword | title:text -2714 | Return of the King Being the Third Part of The Lord of the Rings -7350 | Return of the Shadow -; - -matchAllFields -required_capability: match_command - -from books | match "dark AND lord AND Sauron" | keep book_no, title | sort book_no; - -book_no:keyword | title:text -2714 | Return of the King Being the Third Part of The Lord of the Rings -2936 | Fellowship of the Ring 2ND Edition -; - -matchWithWhereFunctionsAndStats -required_capability: match_command - -from books -| match "Faulkner AND ratings:>4.0" -| where year > 1950 and mv_count(author) == 1 -| stats count(*) BY author.keyword -| sort author.keyword -; - -count(*): long | author.keyword:keyword -1 | Bettilu Stein Faulkner -2 | Colleen Faulkner -1 | Danny Faulkner -1 | Keith Faulkner -1 | Paul Faulkner -1 | William Faulkner -; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index a3ef2471d4e56..a5691a16ca50b 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -55,7 +55,6 @@ processingCommand // in development | {this.isDevVersion()}? inlinestatsCommand | {this.isDevVersion()}? lookupCommand - | {this.isDevVersion()}? matchCommand ; whereCommand @@ -312,11 +311,3 @@ lookupCommand inlinestatsCommand : DEV_INLINESTATS stats=fields (BY grouping=fields)? 
; - -matchCommand - : DEV_MATCH matchQuery - ; - -matchQuery - : QUOTED_STRING - ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 31a3096c13cd2..f714d4d1808c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -268,11 +268,6 @@ public enum Cap { */ COMBINE_BINARY_COMPARISONS, - /** - * MATCH command support - */ - MATCH_COMMAND(true), - /** * Support for nanosecond dates as a data type */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index c466f9ebb5e53..a29e16139dde7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -187,7 +186,6 @@ else if (p instanceof Lookup lookup) { checkForSortOnSpatialTypes(p, failures); checkFilterMatchConditions(p, failures); - checkMatchCommand(p, failures); checkFullTextQueryFunctions(p, failures); }); checkRemoteEnrich(plan, failures); @@ -644,22 +642,6 @@ private static void checkFilterMatchConditions(LogicalPlan plan, Set fa } } - private static void checkMatchCommand(LogicalPlan plan, Set failures) { - if (plan instanceof Filter f) { - Expression condition = f.condition(); - if (condition instanceof StringQueryPredicate) { - // Similar to cases present in org.elasticsearch.xpack.esql.optimizer.rules.PushDownAndCombineFilters - - // we can't check if it can be pushed down as we don't have yet information about the fields present in the - // StringQueryPredicate - plan.forEachDown(LogicalPlan.class, lp -> { - if ((lp instanceof Filter || lp instanceof OrderBy || lp instanceof EsRelation) == false) { - failures.add(fail(plan, "MATCH cannot be used after {}", lp.sourceText().split(" ")[0].toUpperCase(Locale.ROOT))); - } - }); - } - } - } - private static void checkFullTextQueryFunctions(LogicalPlan plan, Set failures) { if (plan instanceof Filter f) { Expression condition = f.condition(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index d1d6aae8c3f52..f7eed3e9be796 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -314,9 +314,7 @@ enrichCommand enrichWithClause lookupCommand inlinestatsCommand -matchCommand -matchQuery atn: -[4, 1, 125, 589, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 
7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 132, 8, 1, 10, 1, 12, 1, 135, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 144, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 164, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 183, 8, 5, 10, 5, 12, 5, 186, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 193, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 199, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 207, 8, 5, 10, 5, 12, 5, 210, 9, 5, 1, 6, 1, 6, 3, 6, 214, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 226, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 237, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 243, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 251, 8, 9, 10, 9, 12, 9, 254, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 264, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 269, 8, 10, 10, 10, 12, 10, 272, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 280, 8, 11, 10, 11, 12, 11, 283, 9, 11, 3, 11, 285, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 5, 14, 297, 8, 14, 10, 14, 12, 14, 300, 9, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 307, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 313, 8, 16, 10, 16, 12, 16, 316, 9, 16, 1, 16, 3, 16, 319, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 326, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 3, 20, 334, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 340, 8, 21, 10, 21, 12, 21, 343, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 353, 8, 23, 10, 23, 12, 23, 356, 9, 23, 1, 23, 3, 23, 359, 8, 23, 1, 23, 1, 23, 3, 23, 363, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 3, 25, 370, 8, 25, 1, 25, 1, 25, 3, 25, 374, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 379, 8, 26, 10, 26, 12, 26, 382, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 387, 8, 27, 10, 27, 12, 27, 390, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 395, 8, 28, 10, 28, 12, 28, 398, 9, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 428, 8, 31, 10, 31, 12, 31, 431, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 439, 8, 31, 10, 31, 12, 31, 442, 9, 31, 1, 31, 1, 31, 3, 31, 446, 8, 31, 1, 32, 1, 32, 3, 32, 450, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 459, 8, 34, 10, 34, 12, 34, 462, 9, 34, 1, 35, 1, 35, 3, 35, 466, 8, 35, 1, 35, 1, 35, 3, 35, 470, 8, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 482, 8, 38, 10, 38, 12, 38, 485, 9, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 3, 
40, 495, 8, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 5, 43, 507, 8, 43, 10, 43, 12, 43, 510, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 3, 46, 520, 8, 46, 1, 47, 3, 47, 523, 8, 47, 1, 47, 1, 47, 1, 48, 3, 48, 528, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 553, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 559, 8, 55, 10, 55, 12, 55, 562, 9, 55, 3, 55, 564, 8, 55, 1, 56, 1, 56, 1, 56, 3, 56, 569, 8, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 582, 8, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 0, 4, 2, 10, 18, 20, 61, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 27, 27, 77, 77, 1, 0, 68, 69, 2, 0, 32, 32, 36, 36, 2, 0, 39, 39, 42, 42, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 613, 0, 122, 1, 0, 0, 0, 2, 125, 1, 0, 0, 0, 4, 143, 1, 0, 0, 0, 6, 163, 1, 0, 0, 0, 8, 165, 1, 0, 0, 0, 10, 198, 1, 0, 0, 0, 12, 225, 1, 0, 0, 0, 14, 227, 1, 0, 0, 0, 16, 236, 1, 0, 0, 0, 18, 242, 1, 0, 0, 0, 20, 263, 1, 0, 0, 0, 22, 273, 1, 0, 0, 0, 24, 288, 1, 0, 0, 0, 26, 290, 1, 0, 0, 0, 28, 293, 1, 0, 0, 0, 30, 306, 1, 0, 0, 0, 32, 308, 1, 0, 0, 0, 34, 325, 1, 0, 0, 0, 36, 327, 1, 0, 0, 0, 38, 329, 1, 0, 0, 0, 40, 333, 1, 0, 0, 0, 42, 335, 1, 0, 0, 0, 44, 344, 1, 0, 0, 0, 46, 348, 1, 0, 0, 0, 48, 364, 1, 0, 0, 0, 50, 367, 1, 0, 0, 0, 52, 375, 1, 0, 0, 0, 54, 383, 1, 0, 0, 0, 56, 391, 1, 0, 0, 0, 58, 399, 1, 0, 0, 0, 60, 401, 1, 0, 0, 0, 62, 445, 1, 0, 0, 0, 64, 449, 1, 0, 0, 0, 66, 451, 1, 0, 0, 0, 68, 454, 1, 0, 0, 0, 70, 463, 1, 0, 0, 0, 72, 471, 1, 0, 0, 0, 74, 474, 1, 0, 0, 0, 76, 477, 1, 0, 0, 0, 78, 486, 1, 0, 0, 0, 80, 490, 1, 0, 0, 0, 82, 496, 1, 0, 0, 0, 84, 500, 1, 0, 0, 0, 86, 503, 1, 0, 0, 0, 88, 511, 1, 0, 0, 0, 90, 515, 1, 0, 0, 0, 92, 519, 1, 0, 0, 0, 94, 522, 1, 0, 0, 0, 96, 527, 1, 0, 0, 0, 98, 531, 1, 0, 0, 0, 100, 533, 1, 0, 0, 0, 102, 535, 1, 0, 0, 0, 104, 538, 1, 0, 0, 0, 106, 542, 1, 0, 0, 0, 108, 545, 1, 0, 0, 0, 110, 548, 1, 0, 0, 0, 112, 568, 1, 0, 0, 0, 114, 572, 1, 0, 0, 0, 116, 577, 1, 0, 0, 0, 118, 583, 1, 0, 0, 0, 120, 586, 1, 0, 0, 0, 122, 123, 3, 2, 1, 0, 123, 124, 5, 0, 0, 1, 124, 1, 1, 0, 0, 0, 125, 126, 6, 1, -1, 0, 126, 127, 3, 4, 2, 0, 127, 133, 1, 0, 0, 0, 128, 129, 10, 1, 0, 0, 129, 130, 5, 26, 0, 0, 130, 132, 3, 6, 3, 0, 131, 128, 1, 0, 0, 0, 132, 135, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0, 134, 3, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 136, 144, 3, 102, 51, 0, 137, 144, 3, 32, 16, 0, 138, 144, 3, 108, 54, 0, 139, 144, 3, 26, 13, 0, 140, 144, 3, 106, 53, 0, 141, 142, 4, 2, 1, 0, 142, 144, 3, 46, 23, 0, 143, 136, 1, 0, 0, 0, 143, 137, 1, 0, 0, 0, 143, 138, 1, 0, 0, 0, 143, 139, 1, 0, 0, 0, 143, 140, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 144, 5, 1, 0, 0, 0, 145, 164, 3, 48, 24, 0, 146, 164, 3, 8, 4, 0, 147, 164, 3, 72, 36, 0, 148, 164, 3, 66, 33, 0, 149, 164, 3, 50, 25, 0, 150, 164, 3, 68, 34, 0, 151, 164, 3, 74, 37, 0, 152, 164, 3, 76, 38, 0, 153, 164, 3, 80, 40, 0, 154, 164, 3, 82, 41, 0, 155, 164, 3, 110, 55, 0, 156, 164, 3, 84, 42, 0, 157, 158, 4, 3, 2, 0, 158, 164, 3, 116, 58, 0, 159, 160, 4, 3, 3, 0, 160, 164, 3, 114, 57, 0, 161, 162, 4, 3, 4, 0, 162, 164, 3, 118, 59, 0, 163, 145, 1, 0, 0, 0, 
163, 146, 1, 0, 0, 0, 163, 147, 1, 0, 0, 0, 163, 148, 1, 0, 0, 0, 163, 149, 1, 0, 0, 0, 163, 150, 1, 0, 0, 0, 163, 151, 1, 0, 0, 0, 163, 152, 1, 0, 0, 0, 163, 153, 1, 0, 0, 0, 163, 154, 1, 0, 0, 0, 163, 155, 1, 0, 0, 0, 163, 156, 1, 0, 0, 0, 163, 157, 1, 0, 0, 0, 163, 159, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 164, 7, 1, 0, 0, 0, 165, 166, 5, 17, 0, 0, 166, 167, 3, 10, 5, 0, 167, 9, 1, 0, 0, 0, 168, 169, 6, 5, -1, 0, 169, 170, 5, 45, 0, 0, 170, 199, 3, 10, 5, 8, 171, 199, 3, 16, 8, 0, 172, 199, 3, 12, 6, 0, 173, 175, 3, 16, 8, 0, 174, 176, 5, 45, 0, 0, 175, 174, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 178, 5, 40, 0, 0, 178, 179, 5, 44, 0, 0, 179, 184, 3, 16, 8, 0, 180, 181, 5, 35, 0, 0, 181, 183, 3, 16, 8, 0, 182, 180, 1, 0, 0, 0, 183, 186, 1, 0, 0, 0, 184, 182, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 187, 1, 0, 0, 0, 186, 184, 1, 0, 0, 0, 187, 188, 5, 51, 0, 0, 188, 199, 1, 0, 0, 0, 189, 190, 3, 16, 8, 0, 190, 192, 5, 41, 0, 0, 191, 193, 5, 45, 0, 0, 192, 191, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 195, 5, 46, 0, 0, 195, 199, 1, 0, 0, 0, 196, 197, 4, 5, 5, 0, 197, 199, 3, 14, 7, 0, 198, 168, 1, 0, 0, 0, 198, 171, 1, 0, 0, 0, 198, 172, 1, 0, 0, 0, 198, 173, 1, 0, 0, 0, 198, 189, 1, 0, 0, 0, 198, 196, 1, 0, 0, 0, 199, 208, 1, 0, 0, 0, 200, 201, 10, 5, 0, 0, 201, 202, 5, 31, 0, 0, 202, 207, 3, 10, 5, 6, 203, 204, 10, 4, 0, 0, 204, 205, 5, 48, 0, 0, 205, 207, 3, 10, 5, 5, 206, 200, 1, 0, 0, 0, 206, 203, 1, 0, 0, 0, 207, 210, 1, 0, 0, 0, 208, 206, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 11, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 211, 213, 3, 16, 8, 0, 212, 214, 5, 45, 0, 0, 213, 212, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 216, 5, 43, 0, 0, 216, 217, 3, 98, 49, 0, 217, 226, 1, 0, 0, 0, 218, 220, 3, 16, 8, 0, 219, 221, 5, 45, 0, 0, 220, 219, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 5, 50, 0, 0, 223, 224, 3, 98, 49, 0, 224, 226, 1, 0, 0, 0, 225, 211, 1, 0, 0, 0, 225, 218, 1, 0, 0, 0, 226, 13, 1, 0, 0, 0, 227, 228, 3, 16, 8, 0, 228, 229, 5, 20, 0, 0, 229, 230, 3, 98, 49, 0, 230, 15, 1, 0, 0, 0, 231, 237, 3, 18, 9, 0, 232, 233, 3, 18, 9, 0, 233, 234, 3, 100, 50, 0, 234, 235, 3, 18, 9, 0, 235, 237, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 17, 1, 0, 0, 0, 238, 239, 6, 9, -1, 0, 239, 243, 3, 20, 10, 0, 240, 241, 7, 0, 0, 0, 241, 243, 3, 18, 9, 3, 242, 238, 1, 0, 0, 0, 242, 240, 1, 0, 0, 0, 243, 252, 1, 0, 0, 0, 244, 245, 10, 2, 0, 0, 245, 246, 7, 1, 0, 0, 246, 251, 3, 18, 9, 3, 247, 248, 10, 1, 0, 0, 248, 249, 7, 0, 0, 0, 249, 251, 3, 18, 9, 2, 250, 244, 1, 0, 0, 0, 250, 247, 1, 0, 0, 0, 251, 254, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, 253, 19, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 256, 6, 10, -1, 0, 256, 264, 3, 62, 31, 0, 257, 264, 3, 52, 26, 0, 258, 264, 3, 22, 11, 0, 259, 260, 5, 44, 0, 0, 260, 261, 3, 10, 5, 0, 261, 262, 5, 51, 0, 0, 262, 264, 1, 0, 0, 0, 263, 255, 1, 0, 0, 0, 263, 257, 1, 0, 0, 0, 263, 258, 1, 0, 0, 0, 263, 259, 1, 0, 0, 0, 264, 270, 1, 0, 0, 0, 265, 266, 10, 1, 0, 0, 266, 267, 5, 34, 0, 0, 267, 269, 3, 24, 12, 0, 268, 265, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 21, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 274, 3, 58, 29, 0, 274, 284, 5, 44, 0, 0, 275, 285, 5, 62, 0, 0, 276, 281, 3, 10, 5, 0, 277, 278, 5, 35, 0, 0, 278, 280, 3, 10, 5, 0, 279, 277, 1, 0, 0, 0, 280, 283, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 285, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 284, 275, 1, 0, 0, 0, 284, 276, 1, 0, 0, 0, 284, 285, 1, 
0, 0, 0, 285, 286, 1, 0, 0, 0, 286, 287, 5, 51, 0, 0, 287, 23, 1, 0, 0, 0, 288, 289, 3, 58, 29, 0, 289, 25, 1, 0, 0, 0, 290, 291, 5, 13, 0, 0, 291, 292, 3, 28, 14, 0, 292, 27, 1, 0, 0, 0, 293, 298, 3, 30, 15, 0, 294, 295, 5, 35, 0, 0, 295, 297, 3, 30, 15, 0, 296, 294, 1, 0, 0, 0, 297, 300, 1, 0, 0, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 29, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 301, 307, 3, 10, 5, 0, 302, 303, 3, 52, 26, 0, 303, 304, 5, 33, 0, 0, 304, 305, 3, 10, 5, 0, 305, 307, 1, 0, 0, 0, 306, 301, 1, 0, 0, 0, 306, 302, 1, 0, 0, 0, 307, 31, 1, 0, 0, 0, 308, 309, 5, 6, 0, 0, 309, 314, 3, 34, 17, 0, 310, 311, 5, 35, 0, 0, 311, 313, 3, 34, 17, 0, 312, 310, 1, 0, 0, 0, 313, 316, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 318, 1, 0, 0, 0, 316, 314, 1, 0, 0, 0, 317, 319, 3, 40, 20, 0, 318, 317, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 33, 1, 0, 0, 0, 320, 321, 3, 36, 18, 0, 321, 322, 5, 109, 0, 0, 322, 323, 3, 38, 19, 0, 323, 326, 1, 0, 0, 0, 324, 326, 3, 38, 19, 0, 325, 320, 1, 0, 0, 0, 325, 324, 1, 0, 0, 0, 326, 35, 1, 0, 0, 0, 327, 328, 5, 77, 0, 0, 328, 37, 1, 0, 0, 0, 329, 330, 7, 2, 0, 0, 330, 39, 1, 0, 0, 0, 331, 334, 3, 42, 21, 0, 332, 334, 3, 44, 22, 0, 333, 331, 1, 0, 0, 0, 333, 332, 1, 0, 0, 0, 334, 41, 1, 0, 0, 0, 335, 336, 5, 76, 0, 0, 336, 341, 5, 77, 0, 0, 337, 338, 5, 35, 0, 0, 338, 340, 5, 77, 0, 0, 339, 337, 1, 0, 0, 0, 340, 343, 1, 0, 0, 0, 341, 339, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 43, 1, 0, 0, 0, 343, 341, 1, 0, 0, 0, 344, 345, 5, 66, 0, 0, 345, 346, 3, 42, 21, 0, 346, 347, 5, 67, 0, 0, 347, 45, 1, 0, 0, 0, 348, 349, 5, 21, 0, 0, 349, 354, 3, 34, 17, 0, 350, 351, 5, 35, 0, 0, 351, 353, 3, 34, 17, 0, 352, 350, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 352, 1, 0, 0, 0, 354, 355, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 357, 359, 3, 28, 14, 0, 358, 357, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 362, 1, 0, 0, 0, 360, 361, 5, 30, 0, 0, 361, 363, 3, 28, 14, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 47, 1, 0, 0, 0, 364, 365, 5, 4, 0, 0, 365, 366, 3, 28, 14, 0, 366, 49, 1, 0, 0, 0, 367, 369, 5, 16, 0, 0, 368, 370, 3, 28, 14, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 372, 5, 30, 0, 0, 372, 374, 3, 28, 14, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 51, 1, 0, 0, 0, 375, 380, 3, 58, 29, 0, 376, 377, 5, 37, 0, 0, 377, 379, 3, 58, 29, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 53, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 388, 3, 60, 30, 0, 384, 385, 5, 37, 0, 0, 385, 387, 3, 60, 30, 0, 386, 384, 1, 0, 0, 0, 387, 390, 1, 0, 0, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 55, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 391, 396, 3, 54, 27, 0, 392, 393, 5, 35, 0, 0, 393, 395, 3, 54, 27, 0, 394, 392, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 57, 1, 0, 0, 0, 398, 396, 1, 0, 0, 0, 399, 400, 7, 3, 0, 0, 400, 59, 1, 0, 0, 0, 401, 402, 5, 81, 0, 0, 402, 61, 1, 0, 0, 0, 403, 446, 5, 46, 0, 0, 404, 405, 3, 96, 48, 0, 405, 406, 5, 68, 0, 0, 406, 446, 1, 0, 0, 0, 407, 446, 3, 94, 47, 0, 408, 446, 3, 96, 48, 0, 409, 446, 3, 90, 45, 0, 410, 446, 3, 64, 32, 0, 411, 446, 3, 98, 49, 0, 412, 413, 5, 66, 0, 0, 413, 418, 3, 92, 46, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 92, 46, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 421, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 422, 5, 67, 0, 0, 422, 446, 1, 0, 0, 0, 423, 424, 5, 66, 0, 0, 424, 429, 3, 90, 45, 0, 425, 426, 5, 35, 0, 0, 426, 
428, 3, 90, 45, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 432, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 433, 5, 67, 0, 0, 433, 446, 1, 0, 0, 0, 434, 435, 5, 66, 0, 0, 435, 440, 3, 98, 49, 0, 436, 437, 5, 35, 0, 0, 437, 439, 3, 98, 49, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 443, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 444, 5, 67, 0, 0, 444, 446, 1, 0, 0, 0, 445, 403, 1, 0, 0, 0, 445, 404, 1, 0, 0, 0, 445, 407, 1, 0, 0, 0, 445, 408, 1, 0, 0, 0, 445, 409, 1, 0, 0, 0, 445, 410, 1, 0, 0, 0, 445, 411, 1, 0, 0, 0, 445, 412, 1, 0, 0, 0, 445, 423, 1, 0, 0, 0, 445, 434, 1, 0, 0, 0, 446, 63, 1, 0, 0, 0, 447, 450, 5, 49, 0, 0, 448, 450, 5, 65, 0, 0, 449, 447, 1, 0, 0, 0, 449, 448, 1, 0, 0, 0, 450, 65, 1, 0, 0, 0, 451, 452, 5, 9, 0, 0, 452, 453, 5, 28, 0, 0, 453, 67, 1, 0, 0, 0, 454, 455, 5, 15, 0, 0, 455, 460, 3, 70, 35, 0, 456, 457, 5, 35, 0, 0, 457, 459, 3, 70, 35, 0, 458, 456, 1, 0, 0, 0, 459, 462, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 69, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 463, 465, 3, 10, 5, 0, 464, 466, 7, 4, 0, 0, 465, 464, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 468, 5, 47, 0, 0, 468, 470, 7, 5, 0, 0, 469, 467, 1, 0, 0, 0, 469, 470, 1, 0, 0, 0, 470, 71, 1, 0, 0, 0, 471, 472, 5, 8, 0, 0, 472, 473, 3, 56, 28, 0, 473, 73, 1, 0, 0, 0, 474, 475, 5, 2, 0, 0, 475, 476, 3, 56, 28, 0, 476, 75, 1, 0, 0, 0, 477, 478, 5, 12, 0, 0, 478, 483, 3, 78, 39, 0, 479, 480, 5, 35, 0, 0, 480, 482, 3, 78, 39, 0, 481, 479, 1, 0, 0, 0, 482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 77, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 487, 3, 54, 27, 0, 487, 488, 5, 85, 0, 0, 488, 489, 3, 54, 27, 0, 489, 79, 1, 0, 0, 0, 490, 491, 5, 1, 0, 0, 491, 492, 3, 20, 10, 0, 492, 494, 3, 98, 49, 0, 493, 495, 3, 86, 43, 0, 494, 493, 1, 0, 0, 0, 494, 495, 1, 0, 0, 0, 495, 81, 1, 0, 0, 0, 496, 497, 5, 7, 0, 0, 497, 498, 3, 20, 10, 0, 498, 499, 3, 98, 49, 0, 499, 83, 1, 0, 0, 0, 500, 501, 5, 11, 0, 0, 501, 502, 3, 52, 26, 0, 502, 85, 1, 0, 0, 0, 503, 508, 3, 88, 44, 0, 504, 505, 5, 35, 0, 0, 505, 507, 3, 88, 44, 0, 506, 504, 1, 0, 0, 0, 507, 510, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 508, 509, 1, 0, 0, 0, 509, 87, 1, 0, 0, 0, 510, 508, 1, 0, 0, 0, 511, 512, 3, 58, 29, 0, 512, 513, 5, 33, 0, 0, 513, 514, 3, 62, 31, 0, 514, 89, 1, 0, 0, 0, 515, 516, 7, 6, 0, 0, 516, 91, 1, 0, 0, 0, 517, 520, 3, 94, 47, 0, 518, 520, 3, 96, 48, 0, 519, 517, 1, 0, 0, 0, 519, 518, 1, 0, 0, 0, 520, 93, 1, 0, 0, 0, 521, 523, 7, 0, 0, 0, 522, 521, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 5, 29, 0, 0, 525, 95, 1, 0, 0, 0, 526, 528, 7, 0, 0, 0, 527, 526, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 530, 5, 28, 0, 0, 530, 97, 1, 0, 0, 0, 531, 532, 5, 27, 0, 0, 532, 99, 1, 0, 0, 0, 533, 534, 7, 7, 0, 0, 534, 101, 1, 0, 0, 0, 535, 536, 5, 5, 0, 0, 536, 537, 3, 104, 52, 0, 537, 103, 1, 0, 0, 0, 538, 539, 5, 66, 0, 0, 539, 540, 3, 2, 1, 0, 540, 541, 5, 67, 0, 0, 541, 105, 1, 0, 0, 0, 542, 543, 5, 14, 0, 0, 543, 544, 5, 101, 0, 0, 544, 107, 1, 0, 0, 0, 545, 546, 5, 10, 0, 0, 546, 547, 5, 105, 0, 0, 547, 109, 1, 0, 0, 0, 548, 549, 5, 3, 0, 0, 549, 552, 5, 91, 0, 0, 550, 551, 5, 89, 0, 0, 551, 553, 3, 54, 27, 0, 552, 550, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 563, 1, 0, 0, 0, 554, 555, 5, 90, 0, 0, 555, 560, 3, 112, 56, 0, 556, 557, 5, 35, 0, 0, 557, 559, 3, 112, 56, 0, 558, 556, 1, 0, 0, 0, 559, 562, 1, 0, 0, 0, 560, 558, 1, 0, 0, 0, 560, 561, 1, 0, 0, 0, 561, 564, 1, 
0, 0, 0, 562, 560, 1, 0, 0, 0, 563, 554, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 111, 1, 0, 0, 0, 565, 566, 3, 54, 27, 0, 566, 567, 5, 33, 0, 0, 567, 569, 1, 0, 0, 0, 568, 565, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 3, 54, 27, 0, 571, 113, 1, 0, 0, 0, 572, 573, 5, 19, 0, 0, 573, 574, 3, 34, 17, 0, 574, 575, 5, 89, 0, 0, 575, 576, 3, 56, 28, 0, 576, 115, 1, 0, 0, 0, 577, 578, 5, 18, 0, 0, 578, 581, 3, 28, 14, 0, 579, 580, 5, 30, 0, 0, 580, 582, 3, 28, 14, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 117, 1, 0, 0, 0, 583, 584, 5, 20, 0, 0, 584, 585, 3, 120, 60, 0, 585, 119, 1, 0, 0, 0, 586, 587, 5, 27, 0, 0, 587, 121, 1, 0, 0, 0, 54, 133, 143, 163, 175, 184, 192, 198, 206, 208, 213, 220, 225, 236, 242, 250, 252, 263, 270, 281, 284, 298, 306, 314, 318, 325, 333, 341, 354, 358, 362, 369, 373, 380, 388, 396, 418, 429, 440, 445, 449, 460, 465, 469, 483, 494, 508, 519, 522, 527, 552, 560, 563, 568, 581] \ No newline at end of file +[4, 1, 125, 578, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 128, 8, 1, 10, 1, 12, 1, 131, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 140, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 158, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 177, 8, 5, 10, 5, 12, 5, 180, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 187, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 193, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 201, 8, 5, 10, 5, 12, 5, 204, 9, 5, 1, 6, 1, 6, 3, 6, 208, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 215, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 231, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 245, 8, 9, 10, 9, 12, 9, 248, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 263, 8, 10, 10, 10, 12, 10, 266, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 274, 8, 11, 10, 11, 12, 11, 277, 9, 11, 3, 11, 279, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 5, 14, 291, 8, 14, 10, 14, 12, 14, 294, 9, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 301, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 307, 8, 16, 10, 16, 12, 16, 310, 9, 16, 1, 16, 3, 16, 313, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 320, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 3, 20, 328, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 334, 8, 21, 10, 21, 12, 21, 337, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 347, 8, 23, 10, 23, 12, 23, 350, 9, 23, 1, 23, 3, 23, 353, 8, 23, 1, 23, 1, 23, 3, 23, 357, 8, 23, 
1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 3, 25, 364, 8, 25, 1, 25, 1, 25, 3, 25, 368, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 373, 8, 26, 10, 26, 12, 26, 376, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 381, 8, 27, 10, 27, 12, 27, 384, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 389, 8, 28, 10, 28, 12, 28, 392, 9, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 411, 8, 31, 10, 31, 12, 31, 414, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 422, 8, 31, 10, 31, 12, 31, 425, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 433, 8, 31, 10, 31, 12, 31, 436, 9, 31, 1, 31, 1, 31, 3, 31, 440, 8, 31, 1, 32, 1, 32, 3, 32, 444, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 453, 8, 34, 10, 34, 12, 34, 456, 9, 34, 1, 35, 1, 35, 3, 35, 460, 8, 35, 1, 35, 1, 35, 3, 35, 464, 8, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 476, 8, 38, 10, 38, 12, 38, 479, 9, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 489, 8, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 5, 43, 501, 8, 43, 10, 43, 12, 43, 504, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 3, 46, 514, 8, 46, 1, 47, 3, 47, 517, 8, 47, 1, 47, 1, 47, 1, 48, 3, 48, 522, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 547, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 553, 8, 55, 10, 55, 12, 55, 556, 9, 55, 3, 55, 558, 8, 55, 1, 56, 1, 56, 1, 56, 3, 56, 563, 8, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 576, 8, 58, 1, 58, 0, 4, 2, 10, 18, 20, 59, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 27, 27, 77, 77, 1, 0, 68, 69, 2, 0, 32, 32, 36, 36, 2, 0, 39, 39, 42, 42, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 603, 0, 118, 1, 0, 0, 0, 2, 121, 1, 0, 0, 0, 4, 139, 1, 0, 0, 0, 6, 157, 1, 0, 0, 0, 8, 159, 1, 0, 0, 0, 10, 192, 1, 0, 0, 0, 12, 219, 1, 0, 0, 0, 14, 221, 1, 0, 0, 0, 16, 230, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 257, 1, 0, 0, 0, 22, 267, 1, 0, 0, 0, 24, 282, 1, 0, 0, 0, 26, 284, 1, 0, 0, 0, 28, 287, 1, 0, 0, 0, 30, 300, 1, 0, 0, 0, 32, 302, 1, 0, 0, 0, 34, 319, 1, 0, 0, 0, 36, 321, 1, 0, 0, 0, 38, 323, 1, 0, 0, 0, 40, 327, 1, 0, 0, 0, 42, 329, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 342, 1, 0, 0, 0, 48, 358, 1, 0, 0, 0, 50, 361, 1, 0, 0, 0, 52, 369, 1, 0, 0, 0, 54, 377, 1, 0, 0, 0, 56, 385, 1, 0, 0, 0, 58, 393, 1, 0, 0, 0, 60, 395, 1, 0, 0, 0, 62, 439, 1, 0, 0, 0, 64, 443, 1, 0, 0, 0, 66, 445, 1, 0, 0, 0, 68, 448, 1, 0, 0, 0, 70, 457, 1, 0, 0, 0, 72, 465, 1, 0, 0, 0, 74, 468, 1, 0, 0, 0, 76, 471, 1, 0, 0, 0, 78, 480, 1, 0, 0, 0, 80, 484, 1, 0, 0, 0, 82, 490, 1, 0, 0, 0, 84, 494, 1, 0, 0, 0, 86, 497, 1, 0, 0, 0, 88, 505, 1, 0, 0, 0, 90, 509, 1, 0, 0, 0, 92, 513, 1, 0, 0, 0, 94, 516, 1, 0, 0, 0, 96, 521, 1, 0, 0, 0, 98, 525, 1, 0, 0, 0, 100, 527, 1, 0, 0, 0, 102, 529, 1, 0, 0, 0, 104, 532, 1, 0, 0, 0, 106, 536, 1, 0, 0, 0, 108, 539, 1, 0, 0, 0, 110, 542, 1, 0, 0, 0, 112, 562, 1, 0, 0, 0, 114, 566, 1, 0, 0, 0, 116, 571, 1, 0, 0, 0, 118, 119, 3, 2, 1, 0, 119, 120, 5, 0, 0, 1, 120, 1, 1, 0, 0, 0, 121, 122, 6, 1, -1, 0, 122, 123, 3, 4, 2, 0, 123, 129, 1, 0, 0, 0, 124, 125, 10, 1, 0, 0, 
125, 126, 5, 26, 0, 0, 126, 128, 3, 6, 3, 0, 127, 124, 1, 0, 0, 0, 128, 131, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 3, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 132, 140, 3, 102, 51, 0, 133, 140, 3, 32, 16, 0, 134, 140, 3, 108, 54, 0, 135, 140, 3, 26, 13, 0, 136, 140, 3, 106, 53, 0, 137, 138, 4, 2, 1, 0, 138, 140, 3, 46, 23, 0, 139, 132, 1, 0, 0, 0, 139, 133, 1, 0, 0, 0, 139, 134, 1, 0, 0, 0, 139, 135, 1, 0, 0, 0, 139, 136, 1, 0, 0, 0, 139, 137, 1, 0, 0, 0, 140, 5, 1, 0, 0, 0, 141, 158, 3, 48, 24, 0, 142, 158, 3, 8, 4, 0, 143, 158, 3, 72, 36, 0, 144, 158, 3, 66, 33, 0, 145, 158, 3, 50, 25, 0, 146, 158, 3, 68, 34, 0, 147, 158, 3, 74, 37, 0, 148, 158, 3, 76, 38, 0, 149, 158, 3, 80, 40, 0, 150, 158, 3, 82, 41, 0, 151, 158, 3, 110, 55, 0, 152, 158, 3, 84, 42, 0, 153, 154, 4, 3, 2, 0, 154, 158, 3, 116, 58, 0, 155, 156, 4, 3, 3, 0, 156, 158, 3, 114, 57, 0, 157, 141, 1, 0, 0, 0, 157, 142, 1, 0, 0, 0, 157, 143, 1, 0, 0, 0, 157, 144, 1, 0, 0, 0, 157, 145, 1, 0, 0, 0, 157, 146, 1, 0, 0, 0, 157, 147, 1, 0, 0, 0, 157, 148, 1, 0, 0, 0, 157, 149, 1, 0, 0, 0, 157, 150, 1, 0, 0, 0, 157, 151, 1, 0, 0, 0, 157, 152, 1, 0, 0, 0, 157, 153, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 158, 7, 1, 0, 0, 0, 159, 160, 5, 17, 0, 0, 160, 161, 3, 10, 5, 0, 161, 9, 1, 0, 0, 0, 162, 163, 6, 5, -1, 0, 163, 164, 5, 45, 0, 0, 164, 193, 3, 10, 5, 8, 165, 193, 3, 16, 8, 0, 166, 193, 3, 12, 6, 0, 167, 169, 3, 16, 8, 0, 168, 170, 5, 45, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 5, 40, 0, 0, 172, 173, 5, 44, 0, 0, 173, 178, 3, 16, 8, 0, 174, 175, 5, 35, 0, 0, 175, 177, 3, 16, 8, 0, 176, 174, 1, 0, 0, 0, 177, 180, 1, 0, 0, 0, 178, 176, 1, 0, 0, 0, 178, 179, 1, 0, 0, 0, 179, 181, 1, 0, 0, 0, 180, 178, 1, 0, 0, 0, 181, 182, 5, 51, 0, 0, 182, 193, 1, 0, 0, 0, 183, 184, 3, 16, 8, 0, 184, 186, 5, 41, 0, 0, 185, 187, 5, 45, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 46, 0, 0, 189, 193, 1, 0, 0, 0, 190, 191, 4, 5, 4, 0, 191, 193, 3, 14, 7, 0, 192, 162, 1, 0, 0, 0, 192, 165, 1, 0, 0, 0, 192, 166, 1, 0, 0, 0, 192, 167, 1, 0, 0, 0, 192, 183, 1, 0, 0, 0, 192, 190, 1, 0, 0, 0, 193, 202, 1, 0, 0, 0, 194, 195, 10, 5, 0, 0, 195, 196, 5, 31, 0, 0, 196, 201, 3, 10, 5, 6, 197, 198, 10, 4, 0, 0, 198, 199, 5, 48, 0, 0, 199, 201, 3, 10, 5, 5, 200, 194, 1, 0, 0, 0, 200, 197, 1, 0, 0, 0, 201, 204, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 11, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 207, 3, 16, 8, 0, 206, 208, 5, 45, 0, 0, 207, 206, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 5, 43, 0, 0, 210, 211, 3, 98, 49, 0, 211, 220, 1, 0, 0, 0, 212, 214, 3, 16, 8, 0, 213, 215, 5, 45, 0, 0, 214, 213, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 216, 1, 0, 0, 0, 216, 217, 5, 50, 0, 0, 217, 218, 3, 98, 49, 0, 218, 220, 1, 0, 0, 0, 219, 205, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 220, 13, 1, 0, 0, 0, 221, 222, 3, 16, 8, 0, 222, 223, 5, 20, 0, 0, 223, 224, 3, 98, 49, 0, 224, 15, 1, 0, 0, 0, 225, 231, 3, 18, 9, 0, 226, 227, 3, 18, 9, 0, 227, 228, 3, 100, 50, 0, 228, 229, 3, 18, 9, 0, 229, 231, 1, 0, 0, 0, 230, 225, 1, 0, 0, 0, 230, 226, 1, 0, 0, 0, 231, 17, 1, 0, 0, 0, 232, 233, 6, 9, -1, 0, 233, 237, 3, 20, 10, 0, 234, 235, 7, 0, 0, 0, 235, 237, 3, 18, 9, 3, 236, 232, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 237, 246, 1, 0, 0, 0, 238, 239, 10, 2, 0, 0, 239, 240, 7, 1, 0, 0, 240, 245, 3, 18, 9, 3, 241, 242, 10, 1, 0, 0, 242, 243, 7, 0, 0, 0, 243, 245, 3, 18, 9, 2, 244, 238, 1, 0, 0, 0, 244, 241, 1, 0, 0, 0, 245, 248, 1, 0, 0, 0, 246, 244, 1, 0, 0, 0, 246, 247, 1, 
0, 0, 0, 247, 19, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 249, 250, 6, 10, -1, 0, 250, 258, 3, 62, 31, 0, 251, 258, 3, 52, 26, 0, 252, 258, 3, 22, 11, 0, 253, 254, 5, 44, 0, 0, 254, 255, 3, 10, 5, 0, 255, 256, 5, 51, 0, 0, 256, 258, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 251, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 264, 1, 0, 0, 0, 259, 260, 10, 1, 0, 0, 260, 261, 5, 34, 0, 0, 261, 263, 3, 24, 12, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 21, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 3, 58, 29, 0, 268, 278, 5, 44, 0, 0, 269, 279, 5, 62, 0, 0, 270, 275, 3, 10, 5, 0, 271, 272, 5, 35, 0, 0, 272, 274, 3, 10, 5, 0, 273, 271, 1, 0, 0, 0, 274, 277, 1, 0, 0, 0, 275, 273, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0, 276, 279, 1, 0, 0, 0, 277, 275, 1, 0, 0, 0, 278, 269, 1, 0, 0, 0, 278, 270, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 5, 51, 0, 0, 281, 23, 1, 0, 0, 0, 282, 283, 3, 58, 29, 0, 283, 25, 1, 0, 0, 0, 284, 285, 5, 13, 0, 0, 285, 286, 3, 28, 14, 0, 286, 27, 1, 0, 0, 0, 287, 292, 3, 30, 15, 0, 288, 289, 5, 35, 0, 0, 289, 291, 3, 30, 15, 0, 290, 288, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 29, 1, 0, 0, 0, 294, 292, 1, 0, 0, 0, 295, 301, 3, 10, 5, 0, 296, 297, 3, 52, 26, 0, 297, 298, 5, 33, 0, 0, 298, 299, 3, 10, 5, 0, 299, 301, 1, 0, 0, 0, 300, 295, 1, 0, 0, 0, 300, 296, 1, 0, 0, 0, 301, 31, 1, 0, 0, 0, 302, 303, 5, 6, 0, 0, 303, 308, 3, 34, 17, 0, 304, 305, 5, 35, 0, 0, 305, 307, 3, 34, 17, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 312, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 313, 3, 40, 20, 0, 312, 311, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 33, 1, 0, 0, 0, 314, 315, 3, 36, 18, 0, 315, 316, 5, 109, 0, 0, 316, 317, 3, 38, 19, 0, 317, 320, 1, 0, 0, 0, 318, 320, 3, 38, 19, 0, 319, 314, 1, 0, 0, 0, 319, 318, 1, 0, 0, 0, 320, 35, 1, 0, 0, 0, 321, 322, 5, 77, 0, 0, 322, 37, 1, 0, 0, 0, 323, 324, 7, 2, 0, 0, 324, 39, 1, 0, 0, 0, 325, 328, 3, 42, 21, 0, 326, 328, 3, 44, 22, 0, 327, 325, 1, 0, 0, 0, 327, 326, 1, 0, 0, 0, 328, 41, 1, 0, 0, 0, 329, 330, 5, 76, 0, 0, 330, 335, 5, 77, 0, 0, 331, 332, 5, 35, 0, 0, 332, 334, 5, 77, 0, 0, 333, 331, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 333, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 43, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 338, 339, 5, 66, 0, 0, 339, 340, 3, 42, 21, 0, 340, 341, 5, 67, 0, 0, 341, 45, 1, 0, 0, 0, 342, 343, 5, 21, 0, 0, 343, 348, 3, 34, 17, 0, 344, 345, 5, 35, 0, 0, 345, 347, 3, 34, 17, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 353, 3, 28, 14, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 355, 5, 30, 0, 0, 355, 357, 3, 28, 14, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 47, 1, 0, 0, 0, 358, 359, 5, 4, 0, 0, 359, 360, 3, 28, 14, 0, 360, 49, 1, 0, 0, 0, 361, 363, 5, 16, 0, 0, 362, 364, 3, 28, 14, 0, 363, 362, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 367, 1, 0, 0, 0, 365, 366, 5, 30, 0, 0, 366, 368, 3, 28, 14, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 51, 1, 0, 0, 0, 369, 374, 3, 58, 29, 0, 370, 371, 5, 37, 0, 0, 371, 373, 3, 58, 29, 0, 372, 370, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 374, 1, 0, 0, 0, 377, 382, 3, 60, 30, 0, 378, 379, 5, 37, 0, 0, 379, 381, 3, 60, 30, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 55, 1, 0, 
0, 0, 384, 382, 1, 0, 0, 0, 385, 390, 3, 54, 27, 0, 386, 387, 5, 35, 0, 0, 387, 389, 3, 54, 27, 0, 388, 386, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 57, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 393, 394, 7, 3, 0, 0, 394, 59, 1, 0, 0, 0, 395, 396, 5, 81, 0, 0, 396, 61, 1, 0, 0, 0, 397, 440, 5, 46, 0, 0, 398, 399, 3, 96, 48, 0, 399, 400, 5, 68, 0, 0, 400, 440, 1, 0, 0, 0, 401, 440, 3, 94, 47, 0, 402, 440, 3, 96, 48, 0, 403, 440, 3, 90, 45, 0, 404, 440, 3, 64, 32, 0, 405, 440, 3, 98, 49, 0, 406, 407, 5, 66, 0, 0, 407, 412, 3, 92, 46, 0, 408, 409, 5, 35, 0, 0, 409, 411, 3, 92, 46, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 415, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 5, 67, 0, 0, 416, 440, 1, 0, 0, 0, 417, 418, 5, 66, 0, 0, 418, 423, 3, 90, 45, 0, 419, 420, 5, 35, 0, 0, 420, 422, 3, 90, 45, 0, 421, 419, 1, 0, 0, 0, 422, 425, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 426, 1, 0, 0, 0, 425, 423, 1, 0, 0, 0, 426, 427, 5, 67, 0, 0, 427, 440, 1, 0, 0, 0, 428, 429, 5, 66, 0, 0, 429, 434, 3, 98, 49, 0, 430, 431, 5, 35, 0, 0, 431, 433, 3, 98, 49, 0, 432, 430, 1, 0, 0, 0, 433, 436, 1, 0, 0, 0, 434, 432, 1, 0, 0, 0, 434, 435, 1, 0, 0, 0, 435, 437, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 437, 438, 5, 67, 0, 0, 438, 440, 1, 0, 0, 0, 439, 397, 1, 0, 0, 0, 439, 398, 1, 0, 0, 0, 439, 401, 1, 0, 0, 0, 439, 402, 1, 0, 0, 0, 439, 403, 1, 0, 0, 0, 439, 404, 1, 0, 0, 0, 439, 405, 1, 0, 0, 0, 439, 406, 1, 0, 0, 0, 439, 417, 1, 0, 0, 0, 439, 428, 1, 0, 0, 0, 440, 63, 1, 0, 0, 0, 441, 444, 5, 49, 0, 0, 442, 444, 5, 65, 0, 0, 443, 441, 1, 0, 0, 0, 443, 442, 1, 0, 0, 0, 444, 65, 1, 0, 0, 0, 445, 446, 5, 9, 0, 0, 446, 447, 5, 28, 0, 0, 447, 67, 1, 0, 0, 0, 448, 449, 5, 15, 0, 0, 449, 454, 3, 70, 35, 0, 450, 451, 5, 35, 0, 0, 451, 453, 3, 70, 35, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 69, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 459, 3, 10, 5, 0, 458, 460, 7, 4, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 463, 1, 0, 0, 0, 461, 462, 5, 47, 0, 0, 462, 464, 7, 5, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 71, 1, 0, 0, 0, 465, 466, 5, 8, 0, 0, 466, 467, 3, 56, 28, 0, 467, 73, 1, 0, 0, 0, 468, 469, 5, 2, 0, 0, 469, 470, 3, 56, 28, 0, 470, 75, 1, 0, 0, 0, 471, 472, 5, 12, 0, 0, 472, 477, 3, 78, 39, 0, 473, 474, 5, 35, 0, 0, 474, 476, 3, 78, 39, 0, 475, 473, 1, 0, 0, 0, 476, 479, 1, 0, 0, 0, 477, 475, 1, 0, 0, 0, 477, 478, 1, 0, 0, 0, 478, 77, 1, 0, 0, 0, 479, 477, 1, 0, 0, 0, 480, 481, 3, 54, 27, 0, 481, 482, 5, 85, 0, 0, 482, 483, 3, 54, 27, 0, 483, 79, 1, 0, 0, 0, 484, 485, 5, 1, 0, 0, 485, 486, 3, 20, 10, 0, 486, 488, 3, 98, 49, 0, 487, 489, 3, 86, 43, 0, 488, 487, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 81, 1, 0, 0, 0, 490, 491, 5, 7, 0, 0, 491, 492, 3, 20, 10, 0, 492, 493, 3, 98, 49, 0, 493, 83, 1, 0, 0, 0, 494, 495, 5, 11, 0, 0, 495, 496, 3, 52, 26, 0, 496, 85, 1, 0, 0, 0, 497, 502, 3, 88, 44, 0, 498, 499, 5, 35, 0, 0, 499, 501, 3, 88, 44, 0, 500, 498, 1, 0, 0, 0, 501, 504, 1, 0, 0, 0, 502, 500, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 87, 1, 0, 0, 0, 504, 502, 1, 0, 0, 0, 505, 506, 3, 58, 29, 0, 506, 507, 5, 33, 0, 0, 507, 508, 3, 62, 31, 0, 508, 89, 1, 0, 0, 0, 509, 510, 7, 6, 0, 0, 510, 91, 1, 0, 0, 0, 511, 514, 3, 94, 47, 0, 512, 514, 3, 96, 48, 0, 513, 511, 1, 0, 0, 0, 513, 512, 1, 0, 0, 0, 514, 93, 1, 0, 0, 0, 515, 517, 7, 0, 0, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 5, 29, 0, 0, 519, 95, 1, 0, 0, 
0, 520, 522, 7, 0, 0, 0, 521, 520, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 5, 28, 0, 0, 524, 97, 1, 0, 0, 0, 525, 526, 5, 27, 0, 0, 526, 99, 1, 0, 0, 0, 527, 528, 7, 7, 0, 0, 528, 101, 1, 0, 0, 0, 529, 530, 5, 5, 0, 0, 530, 531, 3, 104, 52, 0, 531, 103, 1, 0, 0, 0, 532, 533, 5, 66, 0, 0, 533, 534, 3, 2, 1, 0, 534, 535, 5, 67, 0, 0, 535, 105, 1, 0, 0, 0, 536, 537, 5, 14, 0, 0, 537, 538, 5, 101, 0, 0, 538, 107, 1, 0, 0, 0, 539, 540, 5, 10, 0, 0, 540, 541, 5, 105, 0, 0, 541, 109, 1, 0, 0, 0, 542, 543, 5, 3, 0, 0, 543, 546, 5, 91, 0, 0, 544, 545, 5, 89, 0, 0, 545, 547, 3, 54, 27, 0, 546, 544, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 557, 1, 0, 0, 0, 548, 549, 5, 90, 0, 0, 549, 554, 3, 112, 56, 0, 550, 551, 5, 35, 0, 0, 551, 553, 3, 112, 56, 0, 552, 550, 1, 0, 0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 558, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 557, 548, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 111, 1, 0, 0, 0, 559, 560, 3, 54, 27, 0, 560, 561, 5, 33, 0, 0, 561, 563, 1, 0, 0, 0, 562, 559, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 565, 3, 54, 27, 0, 565, 113, 1, 0, 0, 0, 566, 567, 5, 19, 0, 0, 567, 568, 3, 34, 17, 0, 568, 569, 5, 89, 0, 0, 569, 570, 3, 56, 28, 0, 570, 115, 1, 0, 0, 0, 571, 572, 5, 18, 0, 0, 572, 575, 3, 28, 14, 0, 573, 574, 5, 30, 0, 0, 574, 576, 3, 28, 14, 0, 575, 573, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 117, 1, 0, 0, 0, 54, 129, 139, 157, 169, 178, 186, 192, 200, 202, 207, 214, 219, 230, 236, 244, 246, 257, 264, 275, 278, 292, 300, 308, 312, 319, 327, 335, 348, 352, 356, 363, 367, 374, 382, 390, 412, 423, 434, 439, 443, 454, 459, 463, 477, 488, 502, 513, 516, 521, 546, 554, 557, 562, 575] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index fb63e31a37c90..578da6fe786ac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -70,8 +70,7 @@ public class EsqlBaseParser extends ParserConfig { RULE_integerValue = 48, RULE_string = 49, RULE_comparisonOperator = 50, RULE_explainCommand = 51, RULE_subqueryExpression = 52, RULE_showCommand = 53, RULE_metaCommand = 54, RULE_enrichCommand = 55, RULE_enrichWithClause = 56, - RULE_lookupCommand = 57, RULE_inlinestatsCommand = 58, RULE_matchCommand = 59, - RULE_matchQuery = 60; + RULE_lookupCommand = 57, RULE_inlinestatsCommand = 58; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -86,8 +85,7 @@ private static String[] makeRuleNames() { "mvExpandCommand", "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", "metaCommand", - "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand", - "matchCommand", "matchQuery" + "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand" }; } public static final String[] ruleNames = makeRuleNames(); @@ -222,9 +220,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(122); + setState(118); query(0); - setState(123); + setState(119); match(EOF); } } @@ -320,11 +318,11 @@ private 
QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(126); + setState(122); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(133); + setState(129); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -335,16 +333,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(128); + setState(124); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(129); + setState(125); match(PIPE); - setState(130); + setState(126); processingCommand(); } } } - setState(135); + setState(131); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -405,50 +403,50 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(143); + setState(139); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(136); + setState(132); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(137); + setState(133); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(138); + setState(134); metaCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(139); + setState(135); rowCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(140); + setState(136); showCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(141); + setState(137); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(142); + setState(138); metricsCommand(); } break; @@ -509,9 +507,6 @@ public InlinestatsCommandContext inlinestatsCommand() { public LookupCommandContext lookupCommand() { return getRuleContext(LookupCommandContext.class,0); } - public MatchCommandContext matchCommand() { - return getRuleContext(MatchCommandContext.class,0); - } @SuppressWarnings("this-escape") public ProcessingCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -536,120 +531,111 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(163); + setState(157); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(145); + setState(141); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(146); + setState(142); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(147); + setState(143); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(148); + setState(144); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(149); + setState(145); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(150); + setState(146); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(151); + setState(147); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(152); + setState(148); 
renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(153); + setState(149); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(154); + setState(150); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(155); + setState(151); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(156); + setState(152); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(157); + setState(153); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(158); + setState(154); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(159); + setState(155); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(160); + setState(156); lookupCommand(); } break; - case 15: - enterOuterAlt(_localctx, 15); - { - setState(161); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(162); - matchCommand(); - } - break; } } catch (RecognitionException re) { @@ -695,9 +681,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(165); + setState(159); match(WHERE); - setState(166); + setState(160); booleanExpression(0); } } @@ -913,7 +899,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(198); + setState(192); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -922,9 +908,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(169); + setState(163); match(NOT); - setState(170); + setState(164); booleanExpression(8); } break; @@ -933,7 +919,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(171); + setState(165); valueExpression(); } break; @@ -942,7 +928,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(172); + setState(166); regexBooleanExpression(); } break; @@ -951,41 +937,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(173); + setState(167); valueExpression(); - setState(175); + setState(169); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(174); + setState(168); match(NOT); } } - setState(177); + setState(171); match(IN); - setState(178); + setState(172); match(LP); - setState(179); + setState(173); valueExpression(); - setState(184); + setState(178); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(180); + setState(174); match(COMMA); - setState(181); + setState(175); valueExpression(); } } - setState(186); + setState(180); _errHandler.sync(this); _la = _input.LA(1); } - setState(187); + setState(181); match(RP); } break; @@ -994,21 +980,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(189); + setState(183); valueExpression(); - setState(190); + 
setState(184); match(IS); - setState(192); + setState(186); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(191); + setState(185); match(NOT); } } - setState(194); + setState(188); match(NULL); } break; @@ -1017,15 +1003,15 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(196); + setState(190); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(197); + setState(191); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(208); + setState(202); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1033,7 +1019,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(206); + setState(200); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1041,11 +1027,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(200); + setState(194); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(201); + setState(195); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(202); + setState(196); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1054,18 +1040,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(203); + setState(197); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(204); + setState(198); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(205); + setState(199); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(210); + setState(204); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1120,48 +1106,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(225); + setState(219); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(211); + setState(205); valueExpression(); - setState(213); + setState(207); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(212); + setState(206); match(NOT); } } - setState(215); + setState(209); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(216); + setState(210); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(218); + setState(212); valueExpression(); - setState(220); + setState(214); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(219); + setState(213); match(NOT); } } - setState(222); + 
setState(216); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(223); + setState(217); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1214,11 +1200,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(227); + setState(221); valueExpression(); - setState(228); + setState(222); match(DEV_MATCH); - setState(229); + setState(223); ((MatchBooleanExpressionContext)_localctx).queryString = string(); } } @@ -1302,14 +1288,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(236); + setState(230); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(231); + setState(225); operatorExpression(0); } break; @@ -1317,11 +1303,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(232); + setState(226); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(233); + setState(227); comparisonOperator(); - setState(234); + setState(228); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1446,7 +1432,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(242); + setState(236); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1455,7 +1441,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(239); + setState(233); primaryExpression(0); } break; @@ -1464,7 +1450,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(240); + setState(234); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1475,13 +1461,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(241); + setState(235); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(252); + setState(246); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1489,7 +1475,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(250); + setState(244); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1497,9 +1483,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(244); + setState(238); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(245); + setState(239); 
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0)) ) { @@ -1510,7 +1496,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(246); + setState(240); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1519,9 +1505,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(247); + setState(241); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(248); + setState(242); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1532,14 +1518,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(249); + setState(243); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(254); + setState(248); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1697,7 +1683,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(263); + setState(257); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1706,7 +1692,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(256); + setState(250); constant(); } break; @@ -1715,7 +1701,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(257); + setState(251); qualifiedName(); } break; @@ -1724,7 +1710,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(258); + setState(252); functionExpression(); } break; @@ -1733,17 +1719,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(259); + setState(253); match(LP); - setState(260); + setState(254); booleanExpression(0); - setState(261); + setState(255); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(270); + setState(264); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1754,16 +1740,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(265); + setState(259); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(266); + setState(260); match(CAST_OP); - setState(267); + setState(261); dataType(); } } } - setState(272); + setState(266); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1825,37 +1811,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(273); + setState(267); identifier(); - setState(274); + setState(268); match(LP); - setState(284); + setState(278); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(275); + setState(269); match(ASTERISK); } break; case 2: { { - setState(276); + setState(270); booleanExpression(0); - setState(281); + setState(275); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(277); + setState(271); match(COMMA); - setState(278); + setState(272); booleanExpression(0); } } - setState(283); + setState(277); _errHandler.sync(this); _la = _input.LA(1); } @@ -1863,7 +1849,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(286); + setState(280); match(RP); } } @@ -1921,7 +1907,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(288); + setState(282); identifier(); } } @@ -1968,9 +1954,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(284); match(ROW); - setState(291); + setState(285); fields(); } } @@ -2024,23 +2010,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(293); + setState(287); field(); - setState(298); + setState(292); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(294); + setState(288); match(COMMA); - setState(295); + setState(289); field(); } } } - setState(300); + setState(294); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -2090,24 +2076,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 30, RULE_field); try { - setState(306); + setState(300); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(301); + setState(295); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(302); + setState(296); qualifiedName(); - setState(303); + setState(297); match(ASSIGN); - setState(304); + setState(298); booleanExpression(0); } break; @@ -2167,34 +2153,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(308); + setState(302); match(FROM); - setState(309); + setState(303); indexPattern(); - setState(314); + setState(308); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(310); + setState(304); match(COMMA); - setState(311); + setState(305); indexPattern(); } } } - setState(316); + setState(310); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(318); + setState(312); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(317); + setState(311); metadata(); } break; @@ -2245,24 +2231,24 @@ 
public final IndexPatternContext indexPattern() throws RecognitionException { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); enterRule(_localctx, 34, RULE_indexPattern); try { - setState(325); + setState(319); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(320); + setState(314); clusterString(); - setState(321); + setState(315); match(COLON); - setState(322); + setState(316); indexString(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(324); + setState(318); indexString(); } break; @@ -2308,7 +2294,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(327); + setState(321); match(UNQUOTED_SOURCE); } } @@ -2354,7 +2340,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(329); + setState(323); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2409,20 +2395,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 40, RULE_metadata); try { - setState(333); + setState(327); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(331); + setState(325); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(332); + setState(326); deprecated_metadata(); } break; @@ -2479,25 +2465,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(335); + setState(329); match(METADATA); - setState(336); + setState(330); match(UNQUOTED_SOURCE); - setState(341); + setState(335); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(337); + setState(331); match(COMMA); - setState(338); + setState(332); match(UNQUOTED_SOURCE); } } } - setState(343); + setState(337); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2546,11 +2532,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(344); + setState(338); match(OPENING_BRACKET); - setState(345); + setState(339); metadataOption(); - setState(346); + setState(340); match(CLOSING_BRACKET); } } @@ -2614,46 +2600,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(348); + setState(342); match(DEV_METRICS); - setState(349); + setState(343); indexPattern(); - setState(354); + setState(348); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(350); + setState(344); match(COMMA); - setState(351); + setState(345); indexPattern(); } } } - setState(356); + setState(350); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(358); + setState(352); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(357); + setState(351); ((MetricsCommandContext)_localctx).aggregates = fields(); } break; } - setState(362); 
+ setState(356); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(360); + setState(354); match(BY); - setState(361); + setState(355); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2703,9 +2689,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(364); + setState(358); match(EVAL); - setState(365); + setState(359); fields(); } } @@ -2758,26 +2744,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(367); + setState(361); match(STATS); - setState(369); + setState(363); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(368); + setState(362); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(373); + setState(367); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(371); + setState(365); match(BY); - setState(372); + setState(366); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2834,23 +2820,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(375); + setState(369); identifier(); - setState(380); + setState(374); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(376); + setState(370); match(DOT); - setState(377); + setState(371); identifier(); } } } - setState(382); + setState(376); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } @@ -2906,23 +2892,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(383); + setState(377); identifierPattern(); - setState(388); + setState(382); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(384); + setState(378); match(DOT); - setState(385); + setState(379); identifierPattern(); } } } - setState(390); + setState(384); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } @@ -2978,23 +2964,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(391); + setState(385); qualifiedNamePattern(); - setState(396); + setState(390); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(392); + setState(386); match(COMMA); - setState(393); + setState(387); qualifiedNamePattern(); } } } - setState(398); + setState(392); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3042,7 +3028,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(399); + setState(393); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3094,7 +3080,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce try { enterOuterAlt(_localctx, 1); { - setState(401); + setState(395); match(ID_PATTERN); 
} } @@ -3365,14 +3351,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 62, RULE_constant); int _la; try { - setState(445); + setState(439); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(403); + setState(397); match(NULL); } break; @@ -3380,9 +3366,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(404); + setState(398); integerValue(); - setState(405); + setState(399); match(UNQUOTED_IDENTIFIER); } break; @@ -3390,7 +3376,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(407); + setState(401); decimalValue(); } break; @@ -3398,7 +3384,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(408); + setState(402); integerValue(); } break; @@ -3406,7 +3392,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(409); + setState(403); booleanValue(); } break; @@ -3414,7 +3400,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(410); + setState(404); params(); } break; @@ -3422,7 +3408,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(411); + setState(405); string(); } break; @@ -3430,27 +3416,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(412); + setState(406); match(OPENING_BRACKET); - setState(413); + setState(407); numericValue(); - setState(418); + setState(412); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(414); + setState(408); match(COMMA); - setState(415); + setState(409); numericValue(); } } - setState(420); + setState(414); _errHandler.sync(this); _la = _input.LA(1); } - setState(421); + setState(415); match(CLOSING_BRACKET); } break; @@ -3458,27 +3444,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(423); + setState(417); match(OPENING_BRACKET); - setState(424); + setState(418); booleanValue(); - setState(429); + setState(423); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(425); + setState(419); match(COMMA); - setState(426); + setState(420); booleanValue(); } } - setState(431); + setState(425); _errHandler.sync(this); _la = _input.LA(1); } - setState(432); + setState(426); match(CLOSING_BRACKET); } break; @@ -3486,27 +3472,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(434); + setState(428); match(OPENING_BRACKET); - setState(435); + setState(429); string(); - setState(440); + setState(434); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(436); + 
setState(430); match(COMMA); - setState(437); + setState(431); string(); } } - setState(442); + setState(436); _errHandler.sync(this); _la = _input.LA(1); } - setState(443); + setState(437); match(CLOSING_BRACKET); } break; @@ -3580,14 +3566,14 @@ public final ParamsContext params() throws RecognitionException { ParamsContext _localctx = new ParamsContext(_ctx, getState()); enterRule(_localctx, 64, RULE_params); try { - setState(449); + setState(443); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(447); + setState(441); match(PARAM); } break; @@ -3595,7 +3581,7 @@ public final ParamsContext params() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(448); + setState(442); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3644,9 +3630,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(451); + setState(445); match(LIMIT); - setState(452); + setState(446); match(INTEGER_LITERAL); } } @@ -3701,25 +3687,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(454); + setState(448); match(SORT); - setState(455); + setState(449); orderExpression(); - setState(460); + setState(454); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(456); + setState(450); match(COMMA); - setState(457); + setState(451); orderExpression(); } } } - setState(462); + setState(456); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } @@ -3775,14 +3761,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(463); + setState(457); booleanExpression(0); - setState(465); + setState(459); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(464); + setState(458); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3796,14 +3782,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(469); + setState(463); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(467); + setState(461); match(NULLS); - setState(468); + setState(462); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3862,9 +3848,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(471); + setState(465); match(KEEP); - setState(472); + setState(466); qualifiedNamePatterns(); } } @@ -3911,9 +3897,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(474); + setState(468); match(DROP); - setState(475); + setState(469); qualifiedNamePatterns(); } } @@ -3968,25 +3954,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(477); + setState(471); match(RENAME); - setState(478); + setState(472); renameClause(); - setState(483); + setState(477); _errHandler.sync(this); 
_alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(479); + setState(473); match(COMMA); - setState(480); + setState(474); renameClause(); } } } - setState(485); + setState(479); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } @@ -4040,11 +4026,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(486); + setState(480); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(487); + setState(481); match(AS); - setState(488); + setState(482); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4097,18 +4083,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(490); + setState(484); match(DISSECT); - setState(491); + setState(485); primaryExpression(0); - setState(492); + setState(486); string(); - setState(494); + setState(488); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: { - setState(493); + setState(487); commandOptions(); } break; @@ -4161,11 +4147,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(496); + setState(490); match(GROK); - setState(497); + setState(491); primaryExpression(0); - setState(498); + setState(492); string(); } } @@ -4212,9 +4198,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(500); + setState(494); match(MV_EXPAND); - setState(501); + setState(495); qualifiedName(); } } @@ -4268,23 +4254,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(503); + setState(497); commandOption(); - setState(508); + setState(502); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(504); + setState(498); match(COMMA); - setState(505); + setState(499); commandOption(); } } } - setState(510); + setState(504); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); } @@ -4336,11 +4322,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(511); + setState(505); identifier(); - setState(512); + setState(506); match(ASSIGN); - setState(513); + setState(507); constant(); } } @@ -4386,7 +4372,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(515); + setState(509); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4441,20 +4427,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 92, RULE_numericValue); try { - setState(519); + setState(513); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(517); + setState(511); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(518); + setState(512); integerValue(); } break; @@ -4503,12 +4489,12 @@ public final 
DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(516); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(521); + setState(515); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4521,7 +4507,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(524); + setState(518); match(DECIMAL_LITERAL); } } @@ -4568,12 +4554,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(527); + setState(521); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(526); + setState(520); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4586,7 +4572,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(529); + setState(523); match(INTEGER_LITERAL); } } @@ -4630,7 +4616,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(531); + setState(525); match(QUOTED_STRING); } } @@ -4680,7 +4666,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(533); + setState(527); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 1125899906842624000L) != 0)) ) { _errHandler.recoverInline(this); @@ -4735,9 +4721,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(535); + setState(529); match(EXPLAIN); - setState(536); + setState(530); subqueryExpression(); } } @@ -4785,11 +4771,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(532); match(OPENING_BRACKET); - setState(539); + setState(533); query(0); - setState(540); + setState(534); match(CLOSING_BRACKET); } } @@ -4846,9 +4832,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(542); + setState(536); match(SHOW); - setState(543); + setState(537); match(INFO); } } @@ -4905,9 +4891,9 @@ public final MetaCommandContext metaCommand() throws RecognitionException { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(545); + setState(539); match(META); - setState(546); + setState(540); match(FUNCTIONS); } } @@ -4970,46 +4956,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(548); + setState(542); match(ENRICH); - setState(549); + setState(543); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(552); + setState(546); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(550); + setState(544); match(ON); - setState(551); + setState(545); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(563); + setState(557); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(554); + setState(548); match(WITH); - setState(555); + setState(549); enrichWithClause(); - setState(560); + setState(554); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,50,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(556); + setState(550); match(COMMA); - setState(557); + setState(551); enrichWithClause(); } } } - setState(562); + setState(556); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,50,_ctx); } @@ -5066,19 +5052,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(568); + setState(562); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: { - setState(565); + setState(559); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(566); + setState(560); match(ASSIGN); } break; } - setState(570); + setState(564); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5131,13 +5117,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(572); + setState(566); match(DEV_LOOKUP); - setState(573); + setState(567); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(574); + setState(568); match(ON); - setState(575); + setState(569); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5190,18 +5176,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(577); + setState(571); match(DEV_INLINESTATS); - setState(578); + setState(572); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(581); + setState(575); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(579); + setState(573); match(BY); - setState(580); + setState(574); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5219,99 +5205,6 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx return _localctx; } - @SuppressWarnings("CheckReturnValue") - public static class MatchCommandContext extends ParserRuleContext { - public TerminalNode DEV_MATCH() { return getToken(EsqlBaseParser.DEV_MATCH, 0); } - public MatchQueryContext matchQuery() { - return getRuleContext(MatchQueryContext.class,0); - } - @SuppressWarnings("this-escape") - public MatchCommandContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_matchCommand; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMatchCommand(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMatchCommand(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMatchCommand(this); - else return visitor.visitChildren(this); - } - } - - public final MatchCommandContext matchCommand() throws RecognitionException { - MatchCommandContext _localctx = new MatchCommandContext(_ctx, getState()); - enterRule(_localctx, 118, RULE_matchCommand); - try { - enterOuterAlt(_localctx, 1); - { - setState(583); - match(DEV_MATCH); - setState(584); - matchQuery(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - 
_errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class MatchQueryContext extends ParserRuleContext { - public TerminalNode QUOTED_STRING() { return getToken(EsqlBaseParser.QUOTED_STRING, 0); } - @SuppressWarnings("this-escape") - public MatchQueryContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_matchQuery; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMatchQuery(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMatchQuery(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMatchQuery(this); - else return visitor.visitChildren(this); - } - } - - public final MatchQueryContext matchQuery() throws RecognitionException { - MatchQueryContext _localctx = new MatchQueryContext(_ctx, getState()); - enterRule(_localctx, 120, RULE_matchQuery); - try { - enterOuterAlt(_localctx, 1); - { - setState(586); - match(QUOTED_STRING); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: @@ -5349,41 +5242,39 @@ private boolean processingCommand_sempred(ProcessingCommandContext _localctx, in return this.isDevVersion(); case 3: return this.isDevVersion(); - case 4: - return this.isDevVersion(); } return true; } private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 5: + case 4: return this.isDevVersion(); - case 6: + case 5: return precpred(_ctx, 5); - case 7: + case 6: return precpred(_ctx, 4); } return true; } private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 8: + case 7: return precpred(_ctx, 2); - case 9: + case 8: return precpred(_ctx, 1); } return true; } private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 9: return precpred(_ctx, 1); } return true; } public static final String _serializedATN = - "\u0004\u0001}\u024d\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001}\u0242\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5398,365 +5289,359 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ - 
"\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u0084\b\u0001\n"+ - "\u0001\f\u0001\u0087\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0090\b\u0002\u0001"+ + "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0005\u0001\u0080\b\u0001\n\u0001\f\u0001\u0083\t\u0001\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0003\u0002\u008c\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003"+ - "\u0003\u00a4\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00b0\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0005\u0005\u00b7\b\u0005\n\u0005\f\u0005\u00ba\t\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00c1\b\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00c7\b\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00cf\b\u0005\n\u0005\f\u0005\u00d2\t\u0005\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00d6\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0003\u0006\u00dd\b\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00e2\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00ed\b\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0003\t\u00f3\b\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0005\t\u00fb\b\t\n\t\f\t\u00fe\t\t\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0108\b\n\u0001"+ - "\n\u0001\n\u0001\n\u0005\n\u010d\b\n\n\n\f\n\u0110\t\n\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0118"+ - "\b\u000b\n\u000b\f\u000b\u011b\t\u000b\u0003\u000b\u011d\b\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0005\u000e\u0129\b\u000e\n\u000e\f\u000e\u012c"+ - "\t\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0003"+ - "\u000f\u0133\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005"+ - "\u0010\u0139\b\u0010\n\u0010\f\u0010\u013c\t\u0010\u0001\u0010\u0003\u0010"+ - "\u013f\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0003\u0011\u0146\b\u0011\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0001\u0014\u0001\u0014\u0003\u0014\u014e\b\u0014\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0005\u0015\u0154\b\u0015\n\u0015\f\u0015\u0157"+ - "\t\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0161\b\u0017\n\u0017\f\u0017"+ - "\u0164\t\u0017\u0001\u0017\u0003\u0017\u0167\b\u0017\u0001\u0017\u0001"+ - "\u0017\u0003\u0017\u016b\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ - "\u0019\u0001\u0019\u0003\u0019\u0172\b\u0019\u0001\u0019\u0001\u0019\u0003"+ - "\u0019\u0176\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u017b"+ - "\b\u001a\n\u001a\f\u001a\u017e\t\u001a\u0001\u001b\u0001\u001b\u0001\u001b"+ - 
"\u0005\u001b\u0183\b\u001b\n\u001b\f\u001b\u0186\t\u001b\u0001\u001c\u0001"+ - "\u001c\u0001\u001c\u0005\u001c\u018b\b\u001c\n\u001c\f\u001c\u018e\t\u001c"+ - "\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ + "\u0003\u0003\u0003\u009e\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0003\u0005\u00aa\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0005\u0005\u00b1\b\u0005\n\u0005\f\u0005\u00b4\t\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ + "\u00bb\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ + "\u00c1\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0005\u0005\u00c9\b\u0005\n\u0005\f\u0005\u00cc\t\u0005\u0001"+ + "\u0006\u0001\u0006\u0003\u0006\u00d0\b\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d7\b\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0003\u0006\u00dc\b\u0006\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00e7"+ + "\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ed\b\t\u0001\t\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f5\b\t\n\t\f\t\u00f8\t\t\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0102"+ + "\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0107\b\n\n\n\f\n\u010a\t\n\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005"+ + "\u000b\u0112\b\u000b\n\u000b\f\u000b\u0115\t\u000b\u0003\u000b\u0117\b"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e\u0123\b\u000e\n\u000e"+ + "\f\u000e\u0126\t\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0003\u000f\u012d\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0005\u0010\u0133\b\u0010\n\u0010\f\u0010\u0136\t\u0010\u0001"+ + "\u0010\u0003\u0010\u0139\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0003\u0011\u0140\b\u0011\u0001\u0012\u0001\u0012\u0001"+ + "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0003\u0014\u0148\b\u0014\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u014e\b\u0015\n"+ + "\u0015\f\u0015\u0151\t\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u015b"+ + "\b\u0017\n\u0017\f\u0017\u015e\t\u0017\u0001\u0017\u0003\u0017\u0161\b"+ + "\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u0165\b\u0017\u0001\u0018\u0001"+ + "\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0003\u0019\u016c\b\u0019\u0001"+ + "\u0019\u0001\u0019\u0003\u0019\u0170\b\u0019\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0005\u001a\u0175\b\u001a\n\u001a\f\u001a\u0178\t\u001a\u0001\u001b"+ + "\u0001\u001b\u0001\u001b\u0005\u001b\u017d\b\u001b\n\u001b\f\u001b\u0180"+ + "\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0185\b\u001c"+ + "\n\u001c\f\u001c\u0188\t\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ + "\u001f\u0001\u001f\u0005\u001f\u019b\b\u001f\n\u001f\f\u001f\u019e\t\u001f"+ "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f"+ - 
"\u01a1\b\u001f\n\u001f\f\u001f\u01a4\t\u001f\u0001\u001f\u0001\u001f\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01ac\b\u001f\n"+ - "\u001f\f\u001f\u01af\t\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b7\b\u001f\n\u001f\f\u001f"+ - "\u01ba\t\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u01be\b\u001f\u0001"+ - " \u0001 \u0003 \u01c2\b \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001\""+ - "\u0001\"\u0005\"\u01cb\b\"\n\"\f\"\u01ce\t\"\u0001#\u0001#\u0003#\u01d2"+ - "\b#\u0001#\u0001#\u0003#\u01d6\b#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001"+ - "%\u0001&\u0001&\u0001&\u0001&\u0005&\u01e2\b&\n&\f&\u01e5\t&\u0001\'\u0001"+ - "\'\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001(\u0003(\u01ef\b(\u0001)"+ - "\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0005"+ - "+\u01fb\b+\n+\f+\u01fe\t+\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001"+ - ".\u0001.\u0003.\u0208\b.\u0001/\u0003/\u020b\b/\u0001/\u0001/\u00010\u0003"+ - "0\u0210\b0\u00010\u00010\u00011\u00011\u00012\u00012\u00013\u00013\u0001"+ - "3\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u0001"+ - "6\u00017\u00017\u00017\u00017\u00037\u0229\b7\u00017\u00017\u00017\u0001"+ - "7\u00057\u022f\b7\n7\f7\u0232\t7\u00037\u0234\b7\u00018\u00018\u00018"+ - "\u00038\u0239\b8\u00018\u00018\u00019\u00019\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0003:\u0246\b:\u0001;\u0001;\u0001;\u0001<\u0001"+ - "<\u0001<\u0000\u0004\u0002\n\u0012\u0014=\u0000\u0002\u0004\u0006\b\n"+ - "\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.0246"+ - "8:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvx\u0000\b\u0001\u0000<=\u0001\u0000>"+ - "@\u0002\u0000\u001b\u001bMM\u0001\u0000DE\u0002\u0000 $$\u0002\u0000"+ - "\'\'**\u0002\u0000&&44\u0002\u0000557;\u0265\u0000z\u0001\u0000\u0000"+ - "\u0000\u0002}\u0001\u0000\u0000\u0000\u0004\u008f\u0001\u0000\u0000\u0000"+ - "\u0006\u00a3\u0001\u0000\u0000\u0000\b\u00a5\u0001\u0000\u0000\u0000\n"+ - "\u00c6\u0001\u0000\u0000\u0000\f\u00e1\u0001\u0000\u0000\u0000\u000e\u00e3"+ - "\u0001\u0000\u0000\u0000\u0010\u00ec\u0001\u0000\u0000\u0000\u0012\u00f2"+ - "\u0001\u0000\u0000\u0000\u0014\u0107\u0001\u0000\u0000\u0000\u0016\u0111"+ - "\u0001\u0000\u0000\u0000\u0018\u0120\u0001\u0000\u0000\u0000\u001a\u0122"+ - "\u0001\u0000\u0000\u0000\u001c\u0125\u0001\u0000\u0000\u0000\u001e\u0132"+ - "\u0001\u0000\u0000\u0000 \u0134\u0001\u0000\u0000\u0000\"\u0145\u0001"+ - "\u0000\u0000\u0000$\u0147\u0001\u0000\u0000\u0000&\u0149\u0001\u0000\u0000"+ - "\u0000(\u014d\u0001\u0000\u0000\u0000*\u014f\u0001\u0000\u0000\u0000,"+ - "\u0158\u0001\u0000\u0000\u0000.\u015c\u0001\u0000\u0000\u00000\u016c\u0001"+ - "\u0000\u0000\u00002\u016f\u0001\u0000\u0000\u00004\u0177\u0001\u0000\u0000"+ - "\u00006\u017f\u0001\u0000\u0000\u00008\u0187\u0001\u0000\u0000\u0000:"+ - "\u018f\u0001\u0000\u0000\u0000<\u0191\u0001\u0000\u0000\u0000>\u01bd\u0001"+ - "\u0000\u0000\u0000@\u01c1\u0001\u0000\u0000\u0000B\u01c3\u0001\u0000\u0000"+ - "\u0000D\u01c6\u0001\u0000\u0000\u0000F\u01cf\u0001\u0000\u0000\u0000H"+ - "\u01d7\u0001\u0000\u0000\u0000J\u01da\u0001\u0000\u0000\u0000L\u01dd\u0001"+ - "\u0000\u0000\u0000N\u01e6\u0001\u0000\u0000\u0000P\u01ea\u0001\u0000\u0000"+ - "\u0000R\u01f0\u0001\u0000\u0000\u0000T\u01f4\u0001\u0000\u0000\u0000V"+ - "\u01f7\u0001\u0000\u0000\u0000X\u01ff\u0001\u0000\u0000\u0000Z\u0203\u0001"+ - "\u0000\u0000\u0000\\\u0207\u0001\u0000\u0000\u0000^\u020a\u0001\u0000"+ - 
"\u0000\u0000`\u020f\u0001\u0000\u0000\u0000b\u0213\u0001\u0000\u0000\u0000"+ - "d\u0215\u0001\u0000\u0000\u0000f\u0217\u0001\u0000\u0000\u0000h\u021a"+ - "\u0001\u0000\u0000\u0000j\u021e\u0001\u0000\u0000\u0000l\u0221\u0001\u0000"+ - "\u0000\u0000n\u0224\u0001\u0000\u0000\u0000p\u0238\u0001\u0000\u0000\u0000"+ - "r\u023c\u0001\u0000\u0000\u0000t\u0241\u0001\u0000\u0000\u0000v\u0247"+ - "\u0001\u0000\u0000\u0000x\u024a\u0001\u0000\u0000\u0000z{\u0003\u0002"+ - "\u0001\u0000{|\u0005\u0000\u0000\u0001|\u0001\u0001\u0000\u0000\u0000"+ - "}~\u0006\u0001\uffff\uffff\u0000~\u007f\u0003\u0004\u0002\u0000\u007f"+ - "\u0085\u0001\u0000\u0000\u0000\u0080\u0081\n\u0001\u0000\u0000\u0081\u0082"+ - "\u0005\u001a\u0000\u0000\u0082\u0084\u0003\u0006\u0003\u0000\u0083\u0080"+ - "\u0001\u0000\u0000\u0000\u0084\u0087\u0001\u0000\u0000\u0000\u0085\u0083"+ - "\u0001\u0000\u0000\u0000\u0085\u0086\u0001\u0000\u0000\u0000\u0086\u0003"+ - "\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000\u0000\u0000\u0088\u0090"+ - "\u0003f3\u0000\u0089\u0090\u0003 \u0010\u0000\u008a\u0090\u0003l6\u0000"+ - "\u008b\u0090\u0003\u001a\r\u0000\u008c\u0090\u0003j5\u0000\u008d\u008e"+ - "\u0004\u0002\u0001\u0000\u008e\u0090\u0003.\u0017\u0000\u008f\u0088\u0001"+ - "\u0000\u0000\u0000\u008f\u0089\u0001\u0000\u0000\u0000\u008f\u008a\u0001"+ - "\u0000\u0000\u0000\u008f\u008b\u0001\u0000\u0000\u0000\u008f\u008c\u0001"+ - "\u0000\u0000\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u0090\u0005\u0001"+ - "\u0000\u0000\u0000\u0091\u00a4\u00030\u0018\u0000\u0092\u00a4\u0003\b"+ - "\u0004\u0000\u0093\u00a4\u0003H$\u0000\u0094\u00a4\u0003B!\u0000\u0095"+ - "\u00a4\u00032\u0019\u0000\u0096\u00a4\u0003D\"\u0000\u0097\u00a4\u0003"+ - "J%\u0000\u0098\u00a4\u0003L&\u0000\u0099\u00a4\u0003P(\u0000\u009a\u00a4"+ - "\u0003R)\u0000\u009b\u00a4\u0003n7\u0000\u009c\u00a4\u0003T*\u0000\u009d"+ - "\u009e\u0004\u0003\u0002\u0000\u009e\u00a4\u0003t:\u0000\u009f\u00a0\u0004"+ - "\u0003\u0003\u0000\u00a0\u00a4\u0003r9\u0000\u00a1\u00a2\u0004\u0003\u0004"+ - "\u0000\u00a2\u00a4\u0003v;\u0000\u00a3\u0091\u0001\u0000\u0000\u0000\u00a3"+ - "\u0092\u0001\u0000\u0000\u0000\u00a3\u0093\u0001\u0000\u0000\u0000\u00a3"+ - "\u0094\u0001\u0000\u0000\u0000\u00a3\u0095\u0001\u0000\u0000\u0000\u00a3"+ - "\u0096\u0001\u0000\u0000\u0000\u00a3\u0097\u0001\u0000\u0000\u0000\u00a3"+ - "\u0098\u0001\u0000\u0000\u0000\u00a3\u0099\u0001\u0000\u0000\u0000\u00a3"+ - "\u009a\u0001\u0000\u0000\u0000\u00a3\u009b\u0001\u0000\u0000\u0000\u00a3"+ - "\u009c\u0001\u0000\u0000\u0000\u00a3\u009d\u0001\u0000\u0000\u0000\u00a3"+ - "\u009f\u0001\u0000\u0000\u0000\u00a3\u00a1\u0001\u0000\u0000\u0000\u00a4"+ - "\u0007\u0001\u0000\u0000\u0000\u00a5\u00a6\u0005\u0011\u0000\u0000\u00a6"+ - "\u00a7\u0003\n\u0005\u0000\u00a7\t\u0001\u0000\u0000\u0000\u00a8\u00a9"+ - "\u0006\u0005\uffff\uffff\u0000\u00a9\u00aa\u0005-\u0000\u0000\u00aa\u00c7"+ - "\u0003\n\u0005\b\u00ab\u00c7\u0003\u0010\b\u0000\u00ac\u00c7\u0003\f\u0006"+ - "\u0000\u00ad\u00af\u0003\u0010\b\u0000\u00ae\u00b0\u0005-\u0000\u0000"+ - "\u00af\u00ae\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000\u0000"+ - "\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1\u00b2\u0005(\u0000\u0000\u00b2"+ - "\u00b3\u0005,\u0000\u0000\u00b3\u00b8\u0003\u0010\b\u0000\u00b4\u00b5"+ - "\u0005#\u0000\u0000\u00b5\u00b7\u0003\u0010\b\u0000\u00b6\u00b4\u0001"+ - "\u0000\u0000\u0000\u00b7\u00ba\u0001\u0000\u0000\u0000\u00b8\u00b6\u0001"+ - "\u0000\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00bb\u0001"+ - 
"\u0000\u0000\u0000\u00ba\u00b8\u0001\u0000\u0000\u0000\u00bb\u00bc\u0005"+ - "3\u0000\u0000\u00bc\u00c7\u0001\u0000\u0000\u0000\u00bd\u00be\u0003\u0010"+ - "\b\u0000\u00be\u00c0\u0005)\u0000\u0000\u00bf\u00c1\u0005-\u0000\u0000"+ - "\u00c0\u00bf\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000"+ - "\u00c1\u00c2\u0001\u0000\u0000\u0000\u00c2\u00c3\u0005.\u0000\u0000\u00c3"+ - "\u00c7\u0001\u0000\u0000\u0000\u00c4\u00c5\u0004\u0005\u0005\u0000\u00c5"+ - "\u00c7\u0003\u000e\u0007\u0000\u00c6\u00a8\u0001\u0000\u0000\u0000\u00c6"+ - "\u00ab\u0001\u0000\u0000\u0000\u00c6\u00ac\u0001\u0000\u0000\u0000\u00c6"+ - "\u00ad\u0001\u0000\u0000\u0000\u00c6\u00bd\u0001\u0000\u0000\u0000\u00c6"+ - "\u00c4\u0001\u0000\u0000\u0000\u00c7\u00d0\u0001\u0000\u0000\u0000\u00c8"+ - "\u00c9\n\u0005\u0000\u0000\u00c9\u00ca\u0005\u001f\u0000\u0000\u00ca\u00cf"+ - "\u0003\n\u0005\u0006\u00cb\u00cc\n\u0004\u0000\u0000\u00cc\u00cd\u0005"+ - "0\u0000\u0000\u00cd\u00cf\u0003\n\u0005\u0005\u00ce\u00c8\u0001\u0000"+ - "\u0000\u0000\u00ce\u00cb\u0001\u0000\u0000\u0000\u00cf\u00d2\u0001\u0000"+ - "\u0000\u0000\u00d0\u00ce\u0001\u0000\u0000\u0000\u00d0\u00d1\u0001\u0000"+ - "\u0000\u0000\u00d1\u000b\u0001\u0000\u0000\u0000\u00d2\u00d0\u0001\u0000"+ - "\u0000\u0000\u00d3\u00d5\u0003\u0010\b\u0000\u00d4\u00d6\u0005-\u0000"+ - "\u0000\u00d5\u00d4\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000"+ - "\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u00d8\u0005+\u0000\u0000"+ - "\u00d8\u00d9\u0003b1\u0000\u00d9\u00e2\u0001\u0000\u0000\u0000\u00da\u00dc"+ - "\u0003\u0010\b\u0000\u00db\u00dd\u0005-\u0000\u0000\u00dc\u00db\u0001"+ - "\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u0001"+ - "\u0000\u0000\u0000\u00de\u00df\u00052\u0000\u0000\u00df\u00e0\u0003b1"+ - "\u0000\u00e0\u00e2\u0001\u0000\u0000\u0000\u00e1\u00d3\u0001\u0000\u0000"+ - "\u0000\u00e1\u00da\u0001\u0000\u0000\u0000\u00e2\r\u0001\u0000\u0000\u0000"+ - "\u00e3\u00e4\u0003\u0010\b\u0000\u00e4\u00e5\u0005\u0014\u0000\u0000\u00e5"+ - "\u00e6\u0003b1\u0000\u00e6\u000f\u0001\u0000\u0000\u0000\u00e7\u00ed\u0003"+ - "\u0012\t\u0000\u00e8\u00e9\u0003\u0012\t\u0000\u00e9\u00ea\u0003d2\u0000"+ - "\u00ea\u00eb\u0003\u0012\t\u0000\u00eb\u00ed\u0001\u0000\u0000\u0000\u00ec"+ - "\u00e7\u0001\u0000\u0000\u0000\u00ec\u00e8\u0001\u0000\u0000\u0000\u00ed"+ - "\u0011\u0001\u0000\u0000\u0000\u00ee\u00ef\u0006\t\uffff\uffff\u0000\u00ef"+ - "\u00f3\u0003\u0014\n\u0000\u00f0\u00f1\u0007\u0000\u0000\u0000\u00f1\u00f3"+ - "\u0003\u0012\t\u0003\u00f2\u00ee\u0001\u0000\u0000\u0000\u00f2\u00f0\u0001"+ - "\u0000\u0000\u0000\u00f3\u00fc\u0001\u0000\u0000\u0000\u00f4\u00f5\n\u0002"+ - "\u0000\u0000\u00f5\u00f6\u0007\u0001\u0000\u0000\u00f6\u00fb\u0003\u0012"+ - "\t\u0003\u00f7\u00f8\n\u0001\u0000\u0000\u00f8\u00f9\u0007\u0000\u0000"+ - "\u0000\u00f9\u00fb\u0003\u0012\t\u0002\u00fa\u00f4\u0001\u0000\u0000\u0000"+ - "\u00fa\u00f7\u0001\u0000\u0000\u0000\u00fb\u00fe\u0001\u0000\u0000\u0000"+ - "\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fc\u00fd\u0001\u0000\u0000\u0000"+ - "\u00fd\u0013\u0001\u0000\u0000\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000"+ - "\u00ff\u0100\u0006\n\uffff\uffff\u0000\u0100\u0108\u0003>\u001f\u0000"+ - "\u0101\u0108\u00034\u001a\u0000\u0102\u0108\u0003\u0016\u000b\u0000\u0103"+ - "\u0104\u0005,\u0000\u0000\u0104\u0105\u0003\n\u0005\u0000\u0105\u0106"+ - "\u00053\u0000\u0000\u0106\u0108\u0001\u0000\u0000\u0000\u0107\u00ff\u0001"+ - "\u0000\u0000\u0000\u0107\u0101\u0001\u0000\u0000\u0000\u0107\u0102\u0001"+ - 
"\u0000\u0000\u0000\u0107\u0103\u0001\u0000\u0000\u0000\u0108\u010e\u0001"+ - "\u0000\u0000\u0000\u0109\u010a\n\u0001\u0000\u0000\u010a\u010b\u0005\""+ - "\u0000\u0000\u010b\u010d\u0003\u0018\f\u0000\u010c\u0109\u0001\u0000\u0000"+ - "\u0000\u010d\u0110\u0001\u0000\u0000\u0000\u010e\u010c\u0001\u0000\u0000"+ - "\u0000\u010e\u010f\u0001\u0000\u0000\u0000\u010f\u0015\u0001\u0000\u0000"+ - "\u0000\u0110\u010e\u0001\u0000\u0000\u0000\u0111\u0112\u0003:\u001d\u0000"+ - "\u0112\u011c\u0005,\u0000\u0000\u0113\u011d\u0005>\u0000\u0000\u0114\u0119"+ - "\u0003\n\u0005\u0000\u0115\u0116\u0005#\u0000\u0000\u0116\u0118\u0003"+ - "\n\u0005\u0000\u0117\u0115\u0001\u0000\u0000\u0000\u0118\u011b\u0001\u0000"+ - "\u0000\u0000\u0119\u0117\u0001\u0000\u0000\u0000\u0119\u011a\u0001\u0000"+ - "\u0000\u0000\u011a\u011d\u0001\u0000\u0000\u0000\u011b\u0119\u0001\u0000"+ - "\u0000\u0000\u011c\u0113\u0001\u0000\u0000\u0000\u011c\u0114\u0001\u0000"+ - "\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000\u011d\u011e\u0001\u0000"+ - "\u0000\u0000\u011e\u011f\u00053\u0000\u0000\u011f\u0017\u0001\u0000\u0000"+ - "\u0000\u0120\u0121\u0003:\u001d\u0000\u0121\u0019\u0001\u0000\u0000\u0000"+ - "\u0122\u0123\u0005\r\u0000\u0000\u0123\u0124\u0003\u001c\u000e\u0000\u0124"+ - "\u001b\u0001\u0000\u0000\u0000\u0125\u012a\u0003\u001e\u000f\u0000\u0126"+ - "\u0127\u0005#\u0000\u0000\u0127\u0129\u0003\u001e\u000f\u0000\u0128\u0126"+ - "\u0001\u0000\u0000\u0000\u0129\u012c\u0001\u0000\u0000\u0000\u012a\u0128"+ - "\u0001\u0000\u0000\u0000\u012a\u012b\u0001\u0000\u0000\u0000\u012b\u001d"+ - "\u0001\u0000\u0000\u0000\u012c\u012a\u0001\u0000\u0000\u0000\u012d\u0133"+ - "\u0003\n\u0005\u0000\u012e\u012f\u00034\u001a\u0000\u012f\u0130\u0005"+ - "!\u0000\u0000\u0130\u0131\u0003\n\u0005\u0000\u0131\u0133\u0001\u0000"+ - "\u0000\u0000\u0132\u012d\u0001\u0000\u0000\u0000\u0132\u012e\u0001\u0000"+ - "\u0000\u0000\u0133\u001f\u0001\u0000\u0000\u0000\u0134\u0135\u0005\u0006"+ - "\u0000\u0000\u0135\u013a\u0003\"\u0011\u0000\u0136\u0137\u0005#\u0000"+ - "\u0000\u0137\u0139\u0003\"\u0011\u0000\u0138\u0136\u0001\u0000\u0000\u0000"+ - "\u0139\u013c\u0001\u0000\u0000\u0000\u013a\u0138\u0001\u0000\u0000\u0000"+ - "\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013e\u0001\u0000\u0000\u0000"+ - "\u013c\u013a\u0001\u0000\u0000\u0000\u013d\u013f\u0003(\u0014\u0000\u013e"+ - "\u013d\u0001\u0000\u0000\u0000\u013e\u013f\u0001\u0000\u0000\u0000\u013f"+ - "!\u0001\u0000\u0000\u0000\u0140\u0141\u0003$\u0012\u0000\u0141\u0142\u0005"+ - "m\u0000\u0000\u0142\u0143\u0003&\u0013\u0000\u0143\u0146\u0001\u0000\u0000"+ - "\u0000\u0144\u0146\u0003&\u0013\u0000\u0145\u0140\u0001\u0000\u0000\u0000"+ - "\u0145\u0144\u0001\u0000\u0000\u0000\u0146#\u0001\u0000\u0000\u0000\u0147"+ - "\u0148\u0005M\u0000\u0000\u0148%\u0001\u0000\u0000\u0000\u0149\u014a\u0007"+ - "\u0002\u0000\u0000\u014a\'\u0001\u0000\u0000\u0000\u014b\u014e\u0003*"+ - "\u0015\u0000\u014c\u014e\u0003,\u0016\u0000\u014d\u014b\u0001\u0000\u0000"+ - "\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014e)\u0001\u0000\u0000\u0000"+ - "\u014f\u0150\u0005L\u0000\u0000\u0150\u0155\u0005M\u0000\u0000\u0151\u0152"+ - "\u0005#\u0000\u0000\u0152\u0154\u0005M\u0000\u0000\u0153\u0151\u0001\u0000"+ - "\u0000\u0000\u0154\u0157\u0001\u0000\u0000\u0000\u0155\u0153\u0001\u0000"+ - "\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156+\u0001\u0000\u0000"+ - "\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158\u0159\u0005B\u0000\u0000"+ - "\u0159\u015a\u0003*\u0015\u0000\u015a\u015b\u0005C\u0000\u0000\u015b-"+ - 
"\u0001\u0000\u0000\u0000\u015c\u015d\u0005\u0015\u0000\u0000\u015d\u0162"+ - "\u0003\"\u0011\u0000\u015e\u015f\u0005#\u0000\u0000\u015f\u0161\u0003"+ - "\"\u0011\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0161\u0164\u0001\u0000"+ - "\u0000\u0000\u0162\u0160\u0001\u0000\u0000\u0000\u0162\u0163\u0001\u0000"+ - "\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164\u0162\u0001\u0000"+ - "\u0000\u0000\u0165\u0167\u0003\u001c\u000e\u0000\u0166\u0165\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u0167\u016a\u0001\u0000"+ - "\u0000\u0000\u0168\u0169\u0005\u001e\u0000\u0000\u0169\u016b\u0003\u001c"+ - "\u000e\u0000\u016a\u0168\u0001\u0000\u0000\u0000\u016a\u016b\u0001\u0000"+ - "\u0000\u0000\u016b/\u0001\u0000\u0000\u0000\u016c\u016d\u0005\u0004\u0000"+ - "\u0000\u016d\u016e\u0003\u001c\u000e\u0000\u016e1\u0001\u0000\u0000\u0000"+ - "\u016f\u0171\u0005\u0010\u0000\u0000\u0170\u0172\u0003\u001c\u000e\u0000"+ - "\u0171\u0170\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000"+ - "\u0172\u0175\u0001\u0000\u0000\u0000\u0173\u0174\u0005\u001e\u0000\u0000"+ - "\u0174\u0176\u0003\u001c\u000e\u0000\u0175\u0173\u0001\u0000\u0000\u0000"+ - "\u0175\u0176\u0001\u0000\u0000\u0000\u01763\u0001\u0000\u0000\u0000\u0177"+ - "\u017c\u0003:\u001d\u0000\u0178\u0179\u0005%\u0000\u0000\u0179\u017b\u0003"+ - ":\u001d\u0000\u017a\u0178\u0001\u0000\u0000\u0000\u017b\u017e\u0001\u0000"+ - "\u0000\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017c\u017d\u0001\u0000"+ - "\u0000\u0000\u017d5\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000"+ - "\u0000\u017f\u0184\u0003<\u001e\u0000\u0180\u0181\u0005%\u0000\u0000\u0181"+ - "\u0183\u0003<\u001e\u0000\u0182\u0180\u0001\u0000\u0000\u0000\u0183\u0186"+ - "\u0001\u0000\u0000\u0000\u0184\u0182\u0001\u0000\u0000\u0000\u0184\u0185"+ - "\u0001\u0000\u0000\u0000\u01857\u0001\u0000\u0000\u0000\u0186\u0184\u0001"+ - "\u0000\u0000\u0000\u0187\u018c\u00036\u001b\u0000\u0188\u0189\u0005#\u0000"+ - "\u0000\u0189\u018b\u00036\u001b\u0000\u018a\u0188\u0001\u0000\u0000\u0000"+ - "\u018b\u018e\u0001\u0000\u0000\u0000\u018c\u018a\u0001\u0000\u0000\u0000"+ - "\u018c\u018d\u0001\u0000\u0000\u0000\u018d9\u0001\u0000\u0000\u0000\u018e"+ - "\u018c\u0001\u0000\u0000\u0000\u018f\u0190\u0007\u0003\u0000\u0000\u0190"+ - ";\u0001\u0000\u0000\u0000\u0191\u0192\u0005Q\u0000\u0000\u0192=\u0001"+ - "\u0000\u0000\u0000\u0193\u01be\u0005.\u0000\u0000\u0194\u0195\u0003`0"+ - "\u0000\u0195\u0196\u0005D\u0000\u0000\u0196\u01be\u0001\u0000\u0000\u0000"+ - "\u0197\u01be\u0003^/\u0000\u0198\u01be\u0003`0\u0000\u0199\u01be\u0003"+ - "Z-\u0000\u019a\u01be\u0003@ \u0000\u019b\u01be\u0003b1\u0000\u019c\u019d"+ - "\u0005B\u0000\u0000\u019d\u01a2\u0003\\.\u0000\u019e\u019f\u0005#\u0000"+ - "\u0000\u019f\u01a1\u0003\\.\u0000\u01a0\u019e\u0001\u0000\u0000\u0000"+ - "\u01a1\u01a4\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000"+ - "\u01a2\u01a3\u0001\u0000\u0000\u0000\u01a3\u01a5\u0001\u0000\u0000\u0000"+ - "\u01a4\u01a2\u0001\u0000\u0000\u0000\u01a5\u01a6\u0005C\u0000\u0000\u01a6"+ - "\u01be\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005B\u0000\u0000\u01a8\u01ad"+ - "\u0003Z-\u0000\u01a9\u01aa\u0005#\u0000\u0000\u01aa\u01ac\u0003Z-\u0000"+ - "\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac\u01af\u0001\u0000\u0000\u0000"+ - "\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad\u01ae\u0001\u0000\u0000\u0000"+ - "\u01ae\u01b0\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000"+ - "\u01b0\u01b1\u0005C\u0000\u0000\u01b1\u01be\u0001\u0000\u0000\u0000\u01b2"+ - 
"\u01b3\u0005B\u0000\u0000\u01b3\u01b8\u0003b1\u0000\u01b4\u01b5\u0005"+ - "#\u0000\u0000\u01b5\u01b7\u0003b1\u0000\u01b6\u01b4\u0001\u0000\u0000"+ - "\u0000\u01b7\u01ba\u0001\u0000\u0000\u0000\u01b8\u01b6\u0001\u0000\u0000"+ - "\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9\u01bb\u0001\u0000\u0000"+ - "\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01bc\u0005C\u0000\u0000"+ - "\u01bc\u01be\u0001\u0000\u0000\u0000\u01bd\u0193\u0001\u0000\u0000\u0000"+ - "\u01bd\u0194\u0001\u0000\u0000\u0000\u01bd\u0197\u0001\u0000\u0000\u0000"+ - "\u01bd\u0198\u0001\u0000\u0000\u0000\u01bd\u0199\u0001\u0000\u0000\u0000"+ - "\u01bd\u019a\u0001\u0000\u0000\u0000\u01bd\u019b\u0001\u0000\u0000\u0000"+ - "\u01bd\u019c\u0001\u0000\u0000\u0000\u01bd\u01a7\u0001\u0000\u0000\u0000"+ - "\u01bd\u01b2\u0001\u0000\u0000\u0000\u01be?\u0001\u0000\u0000\u0000\u01bf"+ - "\u01c2\u00051\u0000\u0000\u01c0\u01c2\u0005A\u0000\u0000\u01c1\u01bf\u0001"+ - "\u0000\u0000\u0000\u01c1\u01c0\u0001\u0000\u0000\u0000\u01c2A\u0001\u0000"+ - "\u0000\u0000\u01c3\u01c4\u0005\t\u0000\u0000\u01c4\u01c5\u0005\u001c\u0000"+ - "\u0000\u01c5C\u0001\u0000\u0000\u0000\u01c6\u01c7\u0005\u000f\u0000\u0000"+ - "\u01c7\u01cc\u0003F#\u0000\u01c8\u01c9\u0005#\u0000\u0000\u01c9\u01cb"+ - "\u0003F#\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01cb\u01ce\u0001\u0000"+ - "\u0000\u0000\u01cc\u01ca\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000"+ - "\u0000\u0000\u01cdE\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001\u0000\u0000"+ - "\u0000\u01cf\u01d1\u0003\n\u0005\u0000\u01d0\u01d2\u0007\u0004\u0000\u0000"+ - "\u01d1\u01d0\u0001\u0000\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000"+ - "\u01d2\u01d5\u0001\u0000\u0000\u0000\u01d3\u01d4\u0005/\u0000\u0000\u01d4"+ - "\u01d6\u0007\u0005\u0000\u0000\u01d5\u01d3\u0001\u0000\u0000\u0000\u01d5"+ - "\u01d6\u0001\u0000\u0000\u0000\u01d6G\u0001\u0000\u0000\u0000\u01d7\u01d8"+ - "\u0005\b\u0000\u0000\u01d8\u01d9\u00038\u001c\u0000\u01d9I\u0001\u0000"+ - "\u0000\u0000\u01da\u01db\u0005\u0002\u0000\u0000\u01db\u01dc\u00038\u001c"+ - "\u0000\u01dcK\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\f\u0000\u0000"+ - "\u01de\u01e3\u0003N\'\u0000\u01df\u01e0\u0005#\u0000\u0000\u01e0\u01e2"+ - "\u0003N\'\u0000\u01e1\u01df\u0001\u0000\u0000\u0000\u01e2\u01e5\u0001"+ - "\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001"+ - "\u0000\u0000\u0000\u01e4M\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000"+ - "\u0000\u0000\u01e6\u01e7\u00036\u001b\u0000\u01e7\u01e8\u0005U\u0000\u0000"+ - "\u01e8\u01e9\u00036\u001b\u0000\u01e9O\u0001\u0000\u0000\u0000\u01ea\u01eb"+ - "\u0005\u0001\u0000\u0000\u01eb\u01ec\u0003\u0014\n\u0000\u01ec\u01ee\u0003"+ - "b1\u0000\u01ed\u01ef\u0003V+\u0000\u01ee\u01ed\u0001\u0000\u0000\u0000"+ - "\u01ee\u01ef\u0001\u0000\u0000\u0000\u01efQ\u0001\u0000\u0000\u0000\u01f0"+ - "\u01f1\u0005\u0007\u0000\u0000\u01f1\u01f2\u0003\u0014\n\u0000\u01f2\u01f3"+ - "\u0003b1\u0000\u01f3S\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005\u000b"+ - "\u0000\u0000\u01f5\u01f6\u00034\u001a\u0000\u01f6U\u0001\u0000\u0000\u0000"+ - "\u01f7\u01fc\u0003X,\u0000\u01f8\u01f9\u0005#\u0000\u0000\u01f9\u01fb"+ - "\u0003X,\u0000\u01fa\u01f8\u0001\u0000\u0000\u0000\u01fb\u01fe\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fc\u01fd\u0001\u0000"+ - "\u0000\u0000\u01fdW\u0001\u0000\u0000\u0000\u01fe\u01fc\u0001\u0000\u0000"+ - "\u0000\u01ff\u0200\u0003:\u001d\u0000\u0200\u0201\u0005!\u0000\u0000\u0201"+ - "\u0202\u0003>\u001f\u0000\u0202Y\u0001\u0000\u0000\u0000\u0203\u0204\u0007"+ - 
"\u0006\u0000\u0000\u0204[\u0001\u0000\u0000\u0000\u0205\u0208\u0003^/"+ - "\u0000\u0206\u0208\u0003`0\u0000\u0207\u0205\u0001\u0000\u0000\u0000\u0207"+ - "\u0206\u0001\u0000\u0000\u0000\u0208]\u0001\u0000\u0000\u0000\u0209\u020b"+ - "\u0007\u0000\u0000\u0000\u020a\u0209\u0001\u0000\u0000\u0000\u020a\u020b"+ - "\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020d"+ - "\u0005\u001d\u0000\u0000\u020d_\u0001\u0000\u0000\u0000\u020e\u0210\u0007"+ - "\u0000\u0000\u0000\u020f\u020e\u0001\u0000\u0000\u0000\u020f\u0210\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0001\u0000\u0000\u0000\u0211\u0212\u0005"+ - "\u001c\u0000\u0000\u0212a\u0001\u0000\u0000\u0000\u0213\u0214\u0005\u001b"+ - "\u0000\u0000\u0214c\u0001\u0000\u0000\u0000\u0215\u0216\u0007\u0007\u0000"+ - "\u0000\u0216e\u0001\u0000\u0000\u0000\u0217\u0218\u0005\u0005\u0000\u0000"+ - "\u0218\u0219\u0003h4\u0000\u0219g\u0001\u0000\u0000\u0000\u021a\u021b"+ - "\u0005B\u0000\u0000\u021b\u021c\u0003\u0002\u0001\u0000\u021c\u021d\u0005"+ - "C\u0000\u0000\u021di\u0001\u0000\u0000\u0000\u021e\u021f\u0005\u000e\u0000"+ - "\u0000\u021f\u0220\u0005e\u0000\u0000\u0220k\u0001\u0000\u0000\u0000\u0221"+ - "\u0222\u0005\n\u0000\u0000\u0222\u0223\u0005i\u0000\u0000\u0223m\u0001"+ - "\u0000\u0000\u0000\u0224\u0225\u0005\u0003\u0000\u0000\u0225\u0228\u0005"+ - "[\u0000\u0000\u0226\u0227\u0005Y\u0000\u0000\u0227\u0229\u00036\u001b"+ - "\u0000\u0228\u0226\u0001\u0000\u0000\u0000\u0228\u0229\u0001\u0000\u0000"+ - "\u0000\u0229\u0233\u0001\u0000\u0000\u0000\u022a\u022b\u0005Z\u0000\u0000"+ - "\u022b\u0230\u0003p8\u0000\u022c\u022d\u0005#\u0000\u0000\u022d\u022f"+ - "\u0003p8\u0000\u022e\u022c\u0001\u0000\u0000\u0000\u022f\u0232\u0001\u0000"+ - "\u0000\u0000\u0230\u022e\u0001\u0000\u0000\u0000\u0230\u0231\u0001\u0000"+ - "\u0000\u0000\u0231\u0234\u0001\u0000\u0000\u0000\u0232\u0230\u0001\u0000"+ - "\u0000\u0000\u0233\u022a\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000"+ - "\u0000\u0000\u0234o\u0001\u0000\u0000\u0000\u0235\u0236\u00036\u001b\u0000"+ - "\u0236\u0237\u0005!\u0000\u0000\u0237\u0239\u0001\u0000\u0000\u0000\u0238"+ - "\u0235\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239"+ - "\u023a\u0001\u0000\u0000\u0000\u023a\u023b\u00036\u001b\u0000\u023bq\u0001"+ - "\u0000\u0000\u0000\u023c\u023d\u0005\u0013\u0000\u0000\u023d\u023e\u0003"+ - "\"\u0011\u0000\u023e\u023f\u0005Y\u0000\u0000\u023f\u0240\u00038\u001c"+ - "\u0000\u0240s\u0001\u0000\u0000\u0000\u0241\u0242\u0005\u0012\u0000\u0000"+ - "\u0242\u0245\u0003\u001c\u000e\u0000\u0243\u0244\u0005\u001e\u0000\u0000"+ - "\u0244\u0246\u0003\u001c\u000e\u0000\u0245\u0243\u0001\u0000\u0000\u0000"+ - "\u0245\u0246\u0001\u0000\u0000\u0000\u0246u\u0001\u0000\u0000\u0000\u0247"+ - "\u0248\u0005\u0014\u0000\u0000\u0248\u0249\u0003x<\u0000\u0249w\u0001"+ - "\u0000\u0000\u0000\u024a\u024b\u0005\u001b\u0000\u0000\u024by\u0001\u0000"+ - "\u0000\u00006\u0085\u008f\u00a3\u00af\u00b8\u00c0\u00c6\u00ce\u00d0\u00d5"+ - "\u00dc\u00e1\u00ec\u00f2\u00fa\u00fc\u0107\u010e\u0119\u011c\u012a\u0132"+ - "\u013a\u013e\u0145\u014d\u0155\u0162\u0166\u016a\u0171\u0175\u017c\u0184"+ - "\u018c\u01a2\u01ad\u01b8\u01bd\u01c1\u01cc\u01d1\u01d5\u01e3\u01ee\u01fc"+ - "\u0207\u020a\u020f\u0228\u0230\u0233\u0238\u0245"; + "\u0005\u001f\u01a6\b\u001f\n\u001f\f\u001f\u01a9\t\u001f\u0001\u001f\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b1"+ + "\b\u001f\n\u001f\f\u001f\u01b4\t\u001f\u0001\u001f\u0001\u001f\u0003\u001f"+ + "\u01b8\b\u001f\u0001 \u0001 \u0003 \u01bc\b 
\u0001!\u0001!\u0001!\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c5\b\"\n\"\f\"\u01c8\t\"\u0001#"+ + "\u0001#\u0003#\u01cc\b#\u0001#\u0001#\u0003#\u01d0\b#\u0001$\u0001$\u0001"+ + "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01dc\b&\n&"+ + "\f&\u01df\t&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001"+ + "(\u0003(\u01e9\b(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+ + "+\u0001+\u0001+\u0005+\u01f5\b+\n+\f+\u01f8\t+\u0001,\u0001,\u0001,\u0001"+ + ",\u0001-\u0001-\u0001.\u0001.\u0003.\u0202\b.\u0001/\u0003/\u0205\b/\u0001"+ + "/\u0001/\u00010\u00030\u020a\b0\u00010\u00010\u00011\u00011\u00012\u0001"+ + "2\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00037\u0223\b7\u0001"+ + "7\u00017\u00017\u00017\u00057\u0229\b7\n7\f7\u022c\t7\u00037\u022e\b7"+ + "\u00018\u00018\u00018\u00038\u0233\b8\u00018\u00018\u00019\u00019\u0001"+ + "9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0240\b:\u0001:\u0000"+ + "\u0004\u0002\n\u0012\u0014;\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprt\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000\u001b"+ + "\u001bMM\u0001\u0000DE\u0002\u0000 $$\u0002\u0000\'\'**\u0002\u0000&"+ + "&44\u0002\u0000557;\u025b\u0000v\u0001\u0000\u0000\u0000\u0002y\u0001"+ + "\u0000\u0000\u0000\u0004\u008b\u0001\u0000\u0000\u0000\u0006\u009d\u0001"+ + "\u0000\u0000\u0000\b\u009f\u0001\u0000\u0000\u0000\n\u00c0\u0001\u0000"+ + "\u0000\u0000\f\u00db\u0001\u0000\u0000\u0000\u000e\u00dd\u0001\u0000\u0000"+ + "\u0000\u0010\u00e6\u0001\u0000\u0000\u0000\u0012\u00ec\u0001\u0000\u0000"+ + "\u0000\u0014\u0101\u0001\u0000\u0000\u0000\u0016\u010b\u0001\u0000\u0000"+ + "\u0000\u0018\u011a\u0001\u0000\u0000\u0000\u001a\u011c\u0001\u0000\u0000"+ + "\u0000\u001c\u011f\u0001\u0000\u0000\u0000\u001e\u012c\u0001\u0000\u0000"+ + "\u0000 \u012e\u0001\u0000\u0000\u0000\"\u013f\u0001\u0000\u0000\u0000"+ + "$\u0141\u0001\u0000\u0000\u0000&\u0143\u0001\u0000\u0000\u0000(\u0147"+ + "\u0001\u0000\u0000\u0000*\u0149\u0001\u0000\u0000\u0000,\u0152\u0001\u0000"+ + "\u0000\u0000.\u0156\u0001\u0000\u0000\u00000\u0166\u0001\u0000\u0000\u0000"+ + "2\u0169\u0001\u0000\u0000\u00004\u0171\u0001\u0000\u0000\u00006\u0179"+ + "\u0001\u0000\u0000\u00008\u0181\u0001\u0000\u0000\u0000:\u0189\u0001\u0000"+ + "\u0000\u0000<\u018b\u0001\u0000\u0000\u0000>\u01b7\u0001\u0000\u0000\u0000"+ + "@\u01bb\u0001\u0000\u0000\u0000B\u01bd\u0001\u0000\u0000\u0000D\u01c0"+ + "\u0001\u0000\u0000\u0000F\u01c9\u0001\u0000\u0000\u0000H\u01d1\u0001\u0000"+ + "\u0000\u0000J\u01d4\u0001\u0000\u0000\u0000L\u01d7\u0001\u0000\u0000\u0000"+ + "N\u01e0\u0001\u0000\u0000\u0000P\u01e4\u0001\u0000\u0000\u0000R\u01ea"+ + "\u0001\u0000\u0000\u0000T\u01ee\u0001\u0000\u0000\u0000V\u01f1\u0001\u0000"+ + "\u0000\u0000X\u01f9\u0001\u0000\u0000\u0000Z\u01fd\u0001\u0000\u0000\u0000"+ + "\\\u0201\u0001\u0000\u0000\u0000^\u0204\u0001\u0000\u0000\u0000`\u0209"+ + "\u0001\u0000\u0000\u0000b\u020d\u0001\u0000\u0000\u0000d\u020f\u0001\u0000"+ + "\u0000\u0000f\u0211\u0001\u0000\u0000\u0000h\u0214\u0001\u0000\u0000\u0000"+ + "j\u0218\u0001\u0000\u0000\u0000l\u021b\u0001\u0000\u0000\u0000n\u021e"+ + "\u0001\u0000\u0000\u0000p\u0232\u0001\u0000\u0000\u0000r\u0236\u0001\u0000"+ + "\u0000\u0000t\u023b\u0001\u0000\u0000\u0000vw\u0003\u0002\u0001\u0000"+ + "wx\u0005\u0000\u0000\u0001x\u0001\u0001\u0000\u0000\u0000yz\u0006\u0001"+ + 
"\uffff\uffff\u0000z{\u0003\u0004\u0002\u0000{\u0081\u0001\u0000\u0000"+ + "\u0000|}\n\u0001\u0000\u0000}~\u0005\u001a\u0000\u0000~\u0080\u0003\u0006"+ + "\u0003\u0000\u007f|\u0001\u0000\u0000\u0000\u0080\u0083\u0001\u0000\u0000"+ + "\u0000\u0081\u007f\u0001\u0000\u0000\u0000\u0081\u0082\u0001\u0000\u0000"+ + "\u0000\u0082\u0003\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000"+ + "\u0000\u0084\u008c\u0003f3\u0000\u0085\u008c\u0003 \u0010\u0000\u0086"+ + "\u008c\u0003l6\u0000\u0087\u008c\u0003\u001a\r\u0000\u0088\u008c\u0003"+ + "j5\u0000\u0089\u008a\u0004\u0002\u0001\u0000\u008a\u008c\u0003.\u0017"+ + "\u0000\u008b\u0084\u0001\u0000\u0000\u0000\u008b\u0085\u0001\u0000\u0000"+ + "\u0000\u008b\u0086\u0001\u0000\u0000\u0000\u008b\u0087\u0001\u0000\u0000"+ + "\u0000\u008b\u0088\u0001\u0000\u0000\u0000\u008b\u0089\u0001\u0000\u0000"+ + "\u0000\u008c\u0005\u0001\u0000\u0000\u0000\u008d\u009e\u00030\u0018\u0000"+ + "\u008e\u009e\u0003\b\u0004\u0000\u008f\u009e\u0003H$\u0000\u0090\u009e"+ + "\u0003B!\u0000\u0091\u009e\u00032\u0019\u0000\u0092\u009e\u0003D\"\u0000"+ + "\u0093\u009e\u0003J%\u0000\u0094\u009e\u0003L&\u0000\u0095\u009e\u0003"+ + "P(\u0000\u0096\u009e\u0003R)\u0000\u0097\u009e\u0003n7\u0000\u0098\u009e"+ + "\u0003T*\u0000\u0099\u009a\u0004\u0003\u0002\u0000\u009a\u009e\u0003t"+ + ":\u0000\u009b\u009c\u0004\u0003\u0003\u0000\u009c\u009e\u0003r9\u0000"+ + "\u009d\u008d\u0001\u0000\u0000\u0000\u009d\u008e\u0001\u0000\u0000\u0000"+ + "\u009d\u008f\u0001\u0000\u0000\u0000\u009d\u0090\u0001\u0000\u0000\u0000"+ + "\u009d\u0091\u0001\u0000\u0000\u0000\u009d\u0092\u0001\u0000\u0000\u0000"+ + "\u009d\u0093\u0001\u0000\u0000\u0000\u009d\u0094\u0001\u0000\u0000\u0000"+ + "\u009d\u0095\u0001\u0000\u0000\u0000\u009d\u0096\u0001\u0000\u0000\u0000"+ + "\u009d\u0097\u0001\u0000\u0000\u0000\u009d\u0098\u0001\u0000\u0000\u0000"+ + "\u009d\u0099\u0001\u0000\u0000\u0000\u009d\u009b\u0001\u0000\u0000\u0000"+ + "\u009e\u0007\u0001\u0000\u0000\u0000\u009f\u00a0\u0005\u0011\u0000\u0000"+ + "\u00a0\u00a1\u0003\n\u0005\u0000\u00a1\t\u0001\u0000\u0000\u0000\u00a2"+ + "\u00a3\u0006\u0005\uffff\uffff\u0000\u00a3\u00a4\u0005-\u0000\u0000\u00a4"+ + "\u00c1\u0003\n\u0005\b\u00a5\u00c1\u0003\u0010\b\u0000\u00a6\u00c1\u0003"+ + "\f\u0006\u0000\u00a7\u00a9\u0003\u0010\b\u0000\u00a8\u00aa\u0005-\u0000"+ + "\u0000\u00a9\u00a8\u0001\u0000\u0000\u0000\u00a9\u00aa\u0001\u0000\u0000"+ + "\u0000\u00aa\u00ab\u0001\u0000\u0000\u0000\u00ab\u00ac\u0005(\u0000\u0000"+ + "\u00ac\u00ad\u0005,\u0000\u0000\u00ad\u00b2\u0003\u0010\b\u0000\u00ae"+ + "\u00af\u0005#\u0000\u0000\u00af\u00b1\u0003\u0010\b\u0000\u00b0\u00ae"+ + "\u0001\u0000\u0000\u0000\u00b1\u00b4\u0001\u0000\u0000\u0000\u00b2\u00b0"+ + "\u0001\u0000\u0000\u0000\u00b2\u00b3\u0001\u0000\u0000\u0000\u00b3\u00b5"+ + "\u0001\u0000\u0000\u0000\u00b4\u00b2\u0001\u0000\u0000\u0000\u00b5\u00b6"+ + "\u00053\u0000\u0000\u00b6\u00c1\u0001\u0000\u0000\u0000\u00b7\u00b8\u0003"+ + "\u0010\b\u0000\u00b8\u00ba\u0005)\u0000\u0000\u00b9\u00bb\u0005-\u0000"+ + "\u0000\u00ba\u00b9\u0001\u0000\u0000\u0000\u00ba\u00bb\u0001\u0000\u0000"+ + "\u0000\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc\u00bd\u0005.\u0000\u0000"+ + "\u00bd\u00c1\u0001\u0000\u0000\u0000\u00be\u00bf\u0004\u0005\u0004\u0000"+ + "\u00bf\u00c1\u0003\u000e\u0007\u0000\u00c0\u00a2\u0001\u0000\u0000\u0000"+ + "\u00c0\u00a5\u0001\u0000\u0000\u0000\u00c0\u00a6\u0001\u0000\u0000\u0000"+ + "\u00c0\u00a7\u0001\u0000\u0000\u0000\u00c0\u00b7\u0001\u0000\u0000\u0000"+ + 
"\u00c0\u00be\u0001\u0000\u0000\u0000\u00c1\u00ca\u0001\u0000\u0000\u0000"+ + "\u00c2\u00c3\n\u0005\u0000\u0000\u00c3\u00c4\u0005\u001f\u0000\u0000\u00c4"+ + "\u00c9\u0003\n\u0005\u0006\u00c5\u00c6\n\u0004\u0000\u0000\u00c6\u00c7"+ + "\u00050\u0000\u0000\u00c7\u00c9\u0003\n\u0005\u0005\u00c8\u00c2\u0001"+ + "\u0000\u0000\u0000\u00c8\u00c5\u0001\u0000\u0000\u0000\u00c9\u00cc\u0001"+ + "\u0000\u0000\u0000\u00ca\u00c8\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001"+ + "\u0000\u0000\u0000\u00cb\u000b\u0001\u0000\u0000\u0000\u00cc\u00ca\u0001"+ + "\u0000\u0000\u0000\u00cd\u00cf\u0003\u0010\b\u0000\u00ce\u00d0\u0005-"+ + "\u0000\u0000\u00cf\u00ce\u0001\u0000\u0000\u0000\u00cf\u00d0\u0001\u0000"+ + "\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1\u00d2\u0005+\u0000"+ + "\u0000\u00d2\u00d3\u0003b1\u0000\u00d3\u00dc\u0001\u0000\u0000\u0000\u00d4"+ + "\u00d6\u0003\u0010\b\u0000\u00d5\u00d7\u0005-\u0000\u0000\u00d6\u00d5"+ + "\u0001\u0000\u0000\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u00d8"+ + "\u0001\u0000\u0000\u0000\u00d8\u00d9\u00052\u0000\u0000\u00d9\u00da\u0003"+ + "b1\u0000\u00da\u00dc\u0001\u0000\u0000\u0000\u00db\u00cd\u0001\u0000\u0000"+ + "\u0000\u00db\u00d4\u0001\u0000\u0000\u0000\u00dc\r\u0001\u0000\u0000\u0000"+ + "\u00dd\u00de\u0003\u0010\b\u0000\u00de\u00df\u0005\u0014\u0000\u0000\u00df"+ + "\u00e0\u0003b1\u0000\u00e0\u000f\u0001\u0000\u0000\u0000\u00e1\u00e7\u0003"+ + "\u0012\t\u0000\u00e2\u00e3\u0003\u0012\t\u0000\u00e3\u00e4\u0003d2\u0000"+ + "\u00e4\u00e5\u0003\u0012\t\u0000\u00e5\u00e7\u0001\u0000\u0000\u0000\u00e6"+ + "\u00e1\u0001\u0000\u0000\u0000\u00e6\u00e2\u0001\u0000\u0000\u0000\u00e7"+ + "\u0011\u0001\u0000\u0000\u0000\u00e8\u00e9\u0006\t\uffff\uffff\u0000\u00e9"+ + "\u00ed\u0003\u0014\n\u0000\u00ea\u00eb\u0007\u0000\u0000\u0000\u00eb\u00ed"+ + "\u0003\u0012\t\u0003\u00ec\u00e8\u0001\u0000\u0000\u0000\u00ec\u00ea\u0001"+ + "\u0000\u0000\u0000\u00ed\u00f6\u0001\u0000\u0000\u0000\u00ee\u00ef\n\u0002"+ + "\u0000\u0000\u00ef\u00f0\u0007\u0001\u0000\u0000\u00f0\u00f5\u0003\u0012"+ + "\t\u0003\u00f1\u00f2\n\u0001\u0000\u0000\u00f2\u00f3\u0007\u0000\u0000"+ + "\u0000\u00f3\u00f5\u0003\u0012\t\u0002\u00f4\u00ee\u0001\u0000\u0000\u0000"+ + "\u00f4\u00f1\u0001\u0000\u0000\u0000\u00f5\u00f8\u0001\u0000\u0000\u0000"+ + "\u00f6\u00f4\u0001\u0000\u0000\u0000\u00f6\u00f7\u0001\u0000\u0000\u0000"+ + "\u00f7\u0013\u0001\u0000\u0000\u0000\u00f8\u00f6\u0001\u0000\u0000\u0000"+ + "\u00f9\u00fa\u0006\n\uffff\uffff\u0000\u00fa\u0102\u0003>\u001f\u0000"+ + "\u00fb\u0102\u00034\u001a\u0000\u00fc\u0102\u0003\u0016\u000b\u0000\u00fd"+ + "\u00fe\u0005,\u0000\u0000\u00fe\u00ff\u0003\n\u0005\u0000\u00ff\u0100"+ + "\u00053\u0000\u0000\u0100\u0102\u0001\u0000\u0000\u0000\u0101\u00f9\u0001"+ + "\u0000\u0000\u0000\u0101\u00fb\u0001\u0000\u0000\u0000\u0101\u00fc\u0001"+ + "\u0000\u0000\u0000\u0101\u00fd\u0001\u0000\u0000\u0000\u0102\u0108\u0001"+ + "\u0000\u0000\u0000\u0103\u0104\n\u0001\u0000\u0000\u0104\u0105\u0005\""+ + "\u0000\u0000\u0105\u0107\u0003\u0018\f\u0000\u0106\u0103\u0001\u0000\u0000"+ + "\u0000\u0107\u010a\u0001\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000"+ + "\u0000\u0108\u0109\u0001\u0000\u0000\u0000\u0109\u0015\u0001\u0000\u0000"+ + "\u0000\u010a\u0108\u0001\u0000\u0000\u0000\u010b\u010c\u0003:\u001d\u0000"+ + "\u010c\u0116\u0005,\u0000\u0000\u010d\u0117\u0005>\u0000\u0000\u010e\u0113"+ + "\u0003\n\u0005\u0000\u010f\u0110\u0005#\u0000\u0000\u0110\u0112\u0003"+ + "\n\u0005\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0112\u0115\u0001\u0000"+ + 
"\u0000\u0000\u0113\u0111\u0001\u0000\u0000\u0000\u0113\u0114\u0001\u0000"+ + "\u0000\u0000\u0114\u0117\u0001\u0000\u0000\u0000\u0115\u0113\u0001\u0000"+ + "\u0000\u0000\u0116\u010d\u0001\u0000\u0000\u0000\u0116\u010e\u0001\u0000"+ + "\u0000\u0000\u0116\u0117\u0001\u0000\u0000\u0000\u0117\u0118\u0001\u0000"+ + "\u0000\u0000\u0118\u0119\u00053\u0000\u0000\u0119\u0017\u0001\u0000\u0000"+ + "\u0000\u011a\u011b\u0003:\u001d\u0000\u011b\u0019\u0001\u0000\u0000\u0000"+ + "\u011c\u011d\u0005\r\u0000\u0000\u011d\u011e\u0003\u001c\u000e\u0000\u011e"+ + "\u001b\u0001\u0000\u0000\u0000\u011f\u0124\u0003\u001e\u000f\u0000\u0120"+ + "\u0121\u0005#\u0000\u0000\u0121\u0123\u0003\u001e\u000f\u0000\u0122\u0120"+ + "\u0001\u0000\u0000\u0000\u0123\u0126\u0001\u0000\u0000\u0000\u0124\u0122"+ + "\u0001\u0000\u0000\u0000\u0124\u0125\u0001\u0000\u0000\u0000\u0125\u001d"+ + "\u0001\u0000\u0000\u0000\u0126\u0124\u0001\u0000\u0000\u0000\u0127\u012d"+ + "\u0003\n\u0005\u0000\u0128\u0129\u00034\u001a\u0000\u0129\u012a\u0005"+ + "!\u0000\u0000\u012a\u012b\u0003\n\u0005\u0000\u012b\u012d\u0001\u0000"+ + "\u0000\u0000\u012c\u0127\u0001\u0000\u0000\u0000\u012c\u0128\u0001\u0000"+ + "\u0000\u0000\u012d\u001f\u0001\u0000\u0000\u0000\u012e\u012f\u0005\u0006"+ + "\u0000\u0000\u012f\u0134\u0003\"\u0011\u0000\u0130\u0131\u0005#\u0000"+ + "\u0000\u0131\u0133\u0003\"\u0011\u0000\u0132\u0130\u0001\u0000\u0000\u0000"+ + "\u0133\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000"+ + "\u0134\u0135\u0001\u0000\u0000\u0000\u0135\u0138\u0001\u0000\u0000\u0000"+ + "\u0136\u0134\u0001\u0000\u0000\u0000\u0137\u0139\u0003(\u0014\u0000\u0138"+ + "\u0137\u0001\u0000\u0000\u0000\u0138\u0139\u0001\u0000\u0000\u0000\u0139"+ + "!\u0001\u0000\u0000\u0000\u013a\u013b\u0003$\u0012\u0000\u013b\u013c\u0005"+ + "m\u0000\u0000\u013c\u013d\u0003&\u0013\u0000\u013d\u0140\u0001\u0000\u0000"+ + "\u0000\u013e\u0140\u0003&\u0013\u0000\u013f\u013a\u0001\u0000\u0000\u0000"+ + "\u013f\u013e\u0001\u0000\u0000\u0000\u0140#\u0001\u0000\u0000\u0000\u0141"+ + "\u0142\u0005M\u0000\u0000\u0142%\u0001\u0000\u0000\u0000\u0143\u0144\u0007"+ + "\u0002\u0000\u0000\u0144\'\u0001\u0000\u0000\u0000\u0145\u0148\u0003*"+ + "\u0015\u0000\u0146\u0148\u0003,\u0016\u0000\u0147\u0145\u0001\u0000\u0000"+ + "\u0000\u0147\u0146\u0001\u0000\u0000\u0000\u0148)\u0001\u0000\u0000\u0000"+ + "\u0149\u014a\u0005L\u0000\u0000\u014a\u014f\u0005M\u0000\u0000\u014b\u014c"+ + "\u0005#\u0000\u0000\u014c\u014e\u0005M\u0000\u0000\u014d\u014b\u0001\u0000"+ + "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u014d\u0001\u0000"+ + "\u0000\u0000\u014f\u0150\u0001\u0000\u0000\u0000\u0150+\u0001\u0000\u0000"+ + "\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0152\u0153\u0005B\u0000\u0000"+ + "\u0153\u0154\u0003*\u0015\u0000\u0154\u0155\u0005C\u0000\u0000\u0155-"+ + "\u0001\u0000\u0000\u0000\u0156\u0157\u0005\u0015\u0000\u0000\u0157\u015c"+ + "\u0003\"\u0011\u0000\u0158\u0159\u0005#\u0000\u0000\u0159\u015b\u0003"+ + "\"\u0011\u0000\u015a\u0158\u0001\u0000\u0000\u0000\u015b\u015e\u0001\u0000"+ + "\u0000\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015c\u015d\u0001\u0000"+ + "\u0000\u0000\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000"+ + "\u0000\u0000\u015f\u0161\u0003\u001c\u000e\u0000\u0160\u015f\u0001\u0000"+ + "\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000\u0161\u0164\u0001\u0000"+ + "\u0000\u0000\u0162\u0163\u0005\u001e\u0000\u0000\u0163\u0165\u0003\u001c"+ + "\u000e\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000"+ + 
"\u0000\u0000\u0165/\u0001\u0000\u0000\u0000\u0166\u0167\u0005\u0004\u0000"+ + "\u0000\u0167\u0168\u0003\u001c\u000e\u0000\u01681\u0001\u0000\u0000\u0000"+ + "\u0169\u016b\u0005\u0010\u0000\u0000\u016a\u016c\u0003\u001c\u000e\u0000"+ + "\u016b\u016a\u0001\u0000\u0000\u0000\u016b\u016c\u0001\u0000\u0000\u0000"+ + "\u016c\u016f\u0001\u0000\u0000\u0000\u016d\u016e\u0005\u001e\u0000\u0000"+ + "\u016e\u0170\u0003\u001c\u000e\u0000\u016f\u016d\u0001\u0000\u0000\u0000"+ + "\u016f\u0170\u0001\u0000\u0000\u0000\u01703\u0001\u0000\u0000\u0000\u0171"+ + "\u0176\u0003:\u001d\u0000\u0172\u0173\u0005%\u0000\u0000\u0173\u0175\u0003"+ + ":\u001d\u0000\u0174\u0172\u0001\u0000\u0000\u0000\u0175\u0178\u0001\u0000"+ + "\u0000\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001\u0000"+ + "\u0000\u0000\u01775\u0001\u0000\u0000\u0000\u0178\u0176\u0001\u0000\u0000"+ + "\u0000\u0179\u017e\u0003<\u001e\u0000\u017a\u017b\u0005%\u0000\u0000\u017b"+ + "\u017d\u0003<\u001e\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d\u0180"+ + "\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017e\u017f"+ + "\u0001\u0000\u0000\u0000\u017f7\u0001\u0000\u0000\u0000\u0180\u017e\u0001"+ + "\u0000\u0000\u0000\u0181\u0186\u00036\u001b\u0000\u0182\u0183\u0005#\u0000"+ + "\u0000\u0183\u0185\u00036\u001b\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ + "\u0185\u0188\u0001\u0000\u0000\u0000\u0186\u0184\u0001\u0000\u0000\u0000"+ + "\u0186\u0187\u0001\u0000\u0000\u0000\u01879\u0001\u0000\u0000\u0000\u0188"+ + "\u0186\u0001\u0000\u0000\u0000\u0189\u018a\u0007\u0003\u0000\u0000\u018a"+ + ";\u0001\u0000\u0000\u0000\u018b\u018c\u0005Q\u0000\u0000\u018c=\u0001"+ + "\u0000\u0000\u0000\u018d\u01b8\u0005.\u0000\u0000\u018e\u018f\u0003`0"+ + "\u0000\u018f\u0190\u0005D\u0000\u0000\u0190\u01b8\u0001\u0000\u0000\u0000"+ + "\u0191\u01b8\u0003^/\u0000\u0192\u01b8\u0003`0\u0000\u0193\u01b8\u0003"+ + "Z-\u0000\u0194\u01b8\u0003@ \u0000\u0195\u01b8\u0003b1\u0000\u0196\u0197"+ + "\u0005B\u0000\u0000\u0197\u019c\u0003\\.\u0000\u0198\u0199\u0005#\u0000"+ + "\u0000\u0199\u019b\u0003\\.\u0000\u019a\u0198\u0001\u0000\u0000\u0000"+ + "\u019b\u019e\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000"+ + "\u019c\u019d\u0001\u0000\u0000\u0000\u019d\u019f\u0001\u0000\u0000\u0000"+ + "\u019e\u019c\u0001\u0000\u0000\u0000\u019f\u01a0\u0005C\u0000\u0000\u01a0"+ + "\u01b8\u0001\u0000\u0000\u0000\u01a1\u01a2\u0005B\u0000\u0000\u01a2\u01a7"+ + "\u0003Z-\u0000\u01a3\u01a4\u0005#\u0000\u0000\u01a4\u01a6\u0003Z-\u0000"+ + "\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a9\u0001\u0000\u0000\u0000"+ + "\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000"+ + "\u01a8\u01aa\u0001\u0000\u0000\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000"+ + "\u01aa\u01ab\u0005C\u0000\u0000\u01ab\u01b8\u0001\u0000\u0000\u0000\u01ac"+ + "\u01ad\u0005B\u0000\u0000\u01ad\u01b2\u0003b1\u0000\u01ae\u01af\u0005"+ + "#\u0000\u0000\u01af\u01b1\u0003b1\u0000\u01b0\u01ae\u0001\u0000\u0000"+ + "\u0000\u01b1\u01b4\u0001\u0000\u0000\u0000\u01b2\u01b0\u0001\u0000\u0000"+ + "\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b5\u0001\u0000\u0000"+ + "\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005C\u0000\u0000"+ + "\u01b6\u01b8\u0001\u0000\u0000\u0000\u01b7\u018d\u0001\u0000\u0000\u0000"+ + "\u01b7\u018e\u0001\u0000\u0000\u0000\u01b7\u0191\u0001\u0000\u0000\u0000"+ + "\u01b7\u0192\u0001\u0000\u0000\u0000\u01b7\u0193\u0001\u0000\u0000\u0000"+ + "\u01b7\u0194\u0001\u0000\u0000\u0000\u01b7\u0195\u0001\u0000\u0000\u0000"+ + 
"\u01b7\u0196\u0001\u0000\u0000\u0000\u01b7\u01a1\u0001\u0000\u0000\u0000"+ + "\u01b7\u01ac\u0001\u0000\u0000\u0000\u01b8?\u0001\u0000\u0000\u0000\u01b9"+ + "\u01bc\u00051\u0000\u0000\u01ba\u01bc\u0005A\u0000\u0000\u01bb\u01b9\u0001"+ + "\u0000\u0000\u0000\u01bb\u01ba\u0001\u0000\u0000\u0000\u01bcA\u0001\u0000"+ + "\u0000\u0000\u01bd\u01be\u0005\t\u0000\u0000\u01be\u01bf\u0005\u001c\u0000"+ + "\u0000\u01bfC\u0001\u0000\u0000\u0000\u01c0\u01c1\u0005\u000f\u0000\u0000"+ + "\u01c1\u01c6\u0003F#\u0000\u01c2\u01c3\u0005#\u0000\u0000\u01c3\u01c5"+ + "\u0003F#\u0000\u01c4\u01c2\u0001\u0000\u0000\u0000\u01c5\u01c8\u0001\u0000"+ + "\u0000\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000"+ + "\u0000\u0000\u01c7E\u0001\u0000\u0000\u0000\u01c8\u01c6\u0001\u0000\u0000"+ + "\u0000\u01c9\u01cb\u0003\n\u0005\u0000\u01ca\u01cc\u0007\u0004\u0000\u0000"+ + "\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000"+ + "\u01cc\u01cf\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005/\u0000\u0000\u01ce"+ + "\u01d0\u0007\u0005\u0000\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01cf"+ + "\u01d0\u0001\u0000\u0000\u0000\u01d0G\u0001\u0000\u0000\u0000\u01d1\u01d2"+ + "\u0005\b\u0000\u0000\u01d2\u01d3\u00038\u001c\u0000\u01d3I\u0001\u0000"+ + "\u0000\u0000\u01d4\u01d5\u0005\u0002\u0000\u0000\u01d5\u01d6\u00038\u001c"+ + "\u0000\u01d6K\u0001\u0000\u0000\u0000\u01d7\u01d8\u0005\f\u0000\u0000"+ + "\u01d8\u01dd\u0003N\'\u0000\u01d9\u01da\u0005#\u0000\u0000\u01da\u01dc"+ + "\u0003N\'\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01dc\u01df\u0001"+ + "\u0000\u0000\u0000\u01dd\u01db\u0001\u0000\u0000\u0000\u01dd\u01de\u0001"+ + "\u0000\u0000\u0000\u01deM\u0001\u0000\u0000\u0000\u01df\u01dd\u0001\u0000"+ + "\u0000\u0000\u01e0\u01e1\u00036\u001b\u0000\u01e1\u01e2\u0005U\u0000\u0000"+ + "\u01e2\u01e3\u00036\u001b\u0000\u01e3O\u0001\u0000\u0000\u0000\u01e4\u01e5"+ + "\u0005\u0001\u0000\u0000\u01e5\u01e6\u0003\u0014\n\u0000\u01e6\u01e8\u0003"+ + "b1\u0000\u01e7\u01e9\u0003V+\u0000\u01e8\u01e7\u0001\u0000\u0000\u0000"+ + "\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9Q\u0001\u0000\u0000\u0000\u01ea"+ + "\u01eb\u0005\u0007\u0000\u0000\u01eb\u01ec\u0003\u0014\n\u0000\u01ec\u01ed"+ + "\u0003b1\u0000\u01edS\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u000b"+ + "\u0000\u0000\u01ef\u01f0\u00034\u001a\u0000\u01f0U\u0001\u0000\u0000\u0000"+ + "\u01f1\u01f6\u0003X,\u0000\u01f2\u01f3\u0005#\u0000\u0000\u01f3\u01f5"+ + "\u0003X,\u0000\u01f4\u01f2\u0001\u0000\u0000\u0000\u01f5\u01f8\u0001\u0000"+ + "\u0000\u0000\u01f6\u01f4\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000"+ + "\u0000\u0000\u01f7W\u0001\u0000\u0000\u0000\u01f8\u01f6\u0001\u0000\u0000"+ + "\u0000\u01f9\u01fa\u0003:\u001d\u0000\u01fa\u01fb\u0005!\u0000\u0000\u01fb"+ + "\u01fc\u0003>\u001f\u0000\u01fcY\u0001\u0000\u0000\u0000\u01fd\u01fe\u0007"+ + "\u0006\u0000\u0000\u01fe[\u0001\u0000\u0000\u0000\u01ff\u0202\u0003^/"+ + "\u0000\u0200\u0202\u0003`0\u0000\u0201\u01ff\u0001\u0000\u0000\u0000\u0201"+ + "\u0200\u0001\u0000\u0000\u0000\u0202]\u0001\u0000\u0000\u0000\u0203\u0205"+ + "\u0007\u0000\u0000\u0000\u0204\u0203\u0001\u0000\u0000\u0000\u0204\u0205"+ + "\u0001\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206\u0207"+ + "\u0005\u001d\u0000\u0000\u0207_\u0001\u0000\u0000\u0000\u0208\u020a\u0007"+ + "\u0000\u0000\u0000\u0209\u0208\u0001\u0000\u0000\u0000\u0209\u020a\u0001"+ + "\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0005"+ + "\u001c\u0000\u0000\u020ca\u0001\u0000\u0000\u0000\u020d\u020e\u0005\u001b"+ + 
"\u0000\u0000\u020ec\u0001\u0000\u0000\u0000\u020f\u0210\u0007\u0007\u0000"+ + "\u0000\u0210e\u0001\u0000\u0000\u0000\u0211\u0212\u0005\u0005\u0000\u0000"+ + "\u0212\u0213\u0003h4\u0000\u0213g\u0001\u0000\u0000\u0000\u0214\u0215"+ + "\u0005B\u0000\u0000\u0215\u0216\u0003\u0002\u0001\u0000\u0216\u0217\u0005"+ + "C\u0000\u0000\u0217i\u0001\u0000\u0000\u0000\u0218\u0219\u0005\u000e\u0000"+ + "\u0000\u0219\u021a\u0005e\u0000\u0000\u021ak\u0001\u0000\u0000\u0000\u021b"+ + "\u021c\u0005\n\u0000\u0000\u021c\u021d\u0005i\u0000\u0000\u021dm\u0001"+ + "\u0000\u0000\u0000\u021e\u021f\u0005\u0003\u0000\u0000\u021f\u0222\u0005"+ + "[\u0000\u0000\u0220\u0221\u0005Y\u0000\u0000\u0221\u0223\u00036\u001b"+ + "\u0000\u0222\u0220\u0001\u0000\u0000\u0000\u0222\u0223\u0001\u0000\u0000"+ + "\u0000\u0223\u022d\u0001\u0000\u0000\u0000\u0224\u0225\u0005Z\u0000\u0000"+ + "\u0225\u022a\u0003p8\u0000\u0226\u0227\u0005#\u0000\u0000\u0227\u0229"+ + "\u0003p8\u0000\u0228\u0226\u0001\u0000\u0000\u0000\u0229\u022c\u0001\u0000"+ + "\u0000\u0000\u022a\u0228\u0001\u0000\u0000\u0000\u022a\u022b\u0001\u0000"+ + "\u0000\u0000\u022b\u022e\u0001\u0000\u0000\u0000\u022c\u022a\u0001\u0000"+ + "\u0000\u0000\u022d\u0224\u0001\u0000\u0000\u0000\u022d\u022e\u0001\u0000"+ + "\u0000\u0000\u022eo\u0001\u0000\u0000\u0000\u022f\u0230\u00036\u001b\u0000"+ + "\u0230\u0231\u0005!\u0000\u0000\u0231\u0233\u0001\u0000\u0000\u0000\u0232"+ + "\u022f\u0001\u0000\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000\u0233"+ + "\u0234\u0001\u0000\u0000\u0000\u0234\u0235\u00036\u001b\u0000\u0235q\u0001"+ + "\u0000\u0000\u0000\u0236\u0237\u0005\u0013\u0000\u0000\u0237\u0238\u0003"+ + "\"\u0011\u0000\u0238\u0239\u0005Y\u0000\u0000\u0239\u023a\u00038\u001c"+ + "\u0000\u023as\u0001\u0000\u0000\u0000\u023b\u023c\u0005\u0012\u0000\u0000"+ + "\u023c\u023f\u0003\u001c\u000e\u0000\u023d\u023e\u0005\u001e\u0000\u0000"+ + "\u023e\u0240\u0003\u001c\u000e\u0000\u023f\u023d\u0001\u0000\u0000\u0000"+ + "\u023f\u0240\u0001\u0000\u0000\u0000\u0240u\u0001\u0000\u0000\u00006\u0081"+ + "\u008b\u009d\u00a9\u00b2\u00ba\u00c0\u00c8\u00ca\u00cf\u00d6\u00db\u00e6"+ + "\u00ec\u00f4\u00f6\u0101\u0108\u0113\u0116\u0124\u012c\u0134\u0138\u013f"+ + "\u0147\u014f\u015c\u0160\u0164\u016b\u016f\u0176\u017e\u0186\u019c\u01a7"+ + "\u01b2\u01b7\u01bb\u01c6\u01cb\u01cf\u01dd\u01e8\u01f6\u0201\u0204\u0209"+ + "\u0222\u022a\u022d\u0232\u023f"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 7db53cb7713c6..192b169cc9587 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -1016,30 +1016,6 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p>
*/ @Override public void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterMatchCommand(EsqlBaseParser.MatchCommandContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitMatchCommand(EsqlBaseParser.MatchCommandContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterMatchQuery(EsqlBaseParser.MatchQueryContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitMatchQuery(EsqlBaseParser.MatchQueryContext ctx) { } /** * {@inheritDoc} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 446cdd4cd7834..de98d4333c1d4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -601,18 +601,4 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitMatchCommand(EsqlBaseParser.MatchCommandContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitMatchQuery(EsqlBaseParser.MatchQueryContext ctx) { return visitChildren(ctx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 0c39b3ea83fa9..4348c641d9f69 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -913,24 +913,4 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); - /** - * Enter a parse tree produced by {@link EsqlBaseParser#matchCommand}. - * @param ctx the parse tree - */ - void enterMatchCommand(EsqlBaseParser.MatchCommandContext ctx); - /** - * Exit a parse tree produced by {@link EsqlBaseParser#matchCommand}. - * @param ctx the parse tree - */ - void exitMatchCommand(EsqlBaseParser.MatchCommandContext ctx); - /** - * Enter a parse tree produced by {@link EsqlBaseParser#matchQuery}. - * @param ctx the parse tree - */ - void enterMatchQuery(EsqlBaseParser.MatchQueryContext ctx); - /** - * Exit a parse tree produced by {@link EsqlBaseParser#matchQuery}. - * @param ctx the parse tree - */ - void exitMatchQuery(EsqlBaseParser.MatchQueryContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 31c9371b9f806..c334526abfe39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -550,16 +550,4 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); - /** - * Visit a parse tree produced by {@link EsqlBaseParser#matchCommand}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMatchCommand(EsqlBaseParser.MatchCommandContext ctx); - /** - * Visit a parse tree produced by {@link EsqlBaseParser#matchQuery}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitMatchQuery(EsqlBaseParser.MatchQueryContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index cc6273d4de292..8dc07e2e1017f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -354,23 +353,6 @@ public PlanFactory visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { return input -> new Filter(source(ctx), input, expression); } - @Override - public PlanFactory visitMatchCommand(EsqlBaseParser.MatchCommandContext ctx) { - if (Build.current().isSnapshot() == false) { - throw new ParsingException(source(ctx), "MATCH command currently requires a snapshot build"); - } - - StringQueryPredicate stringQueryPredicate = visitMatchQuery(ctx.matchQuery()); - return input -> new Filter(source(ctx), input, stringQueryPredicate); - } - - @Override - public StringQueryPredicate visitMatchQuery(EsqlBaseParser.MatchQueryContext ctx) { - Source source = source(ctx); - String queryString = unquote(ctx.QUOTED_STRING().getText()); - return new StringQueryPredicate(source, queryString, null); - } - @Override public PlanFactory visitLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { Source source = source(ctx); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 3e8d1e4e71562..a0719286a4009 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -247,10 +247,6 @@ public final void test() throws Throwable { "multiple indices aren't supported", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.UNION_TYPES.capabilityName()) ); - assumeFalse( - "can't use match command in csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_COMMAND.capabilityName()) - ); assumeFalse( "can't use QSTR function in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.QSTR_FUNCTION.capabilityName()) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index a14c6bf22d532..106e58c3f89d9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -10,16 +10,10 @@ import org.elasticsearch.Build; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; -import org.elasticsearch.common.bytes.BytesReference; -import 
org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.LoadMapping; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -56,7 +50,6 @@ import org.elasticsearch.xpack.esql.session.IndexResolver; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -2132,13 +2125,6 @@ private static LogicalPlan analyzeWithEmptyFieldCapsResponse(String query) throw return analyze(query, analyzer); } - private static FieldCapabilitiesResponse readFieldCapsResponse(String resourceName) throws IOException { - InputStream stream = AnalyzerTests.class.getResourceAsStream("/" + resourceName); - BytesReference ref = Streams.readFully(stream); - XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, ref, XContentType.JSON); - return FieldCapabilitiesResponse.fromXContent(parser); - } - private void assertEmptyEsRelation(LogicalPlan plan) { assertThat(plan, instanceOf(EsRelation.class)); EsRelation esRelation = (EsRelation) plan; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 0b83b76992546..2012e319510af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.EsqlParser; -import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.parser.QueryParam; import org.elasticsearch.xpack.esql.parser.QueryParams; @@ -1077,36 +1076,6 @@ public void testMatchFilter() throws Exception { ); } - public void testMatchCommand() { - assertMatchCommand("1:24:", "LIMIT", "from test | limit 10 | match \"Anna\""); - assertMatchCommand("1:13:", "SHOW", "show info | match \"8.16.0\""); - assertMatchCommand("1:17:", "ROW", "row a= \"Anna\" | match \"Anna\""); - assertMatchCommand("1:26:", "EVAL", "from test | eval z = 2 | match \"Anna\""); - assertMatchCommand("1:43:", "DISSECT", "from test | dissect first_name \"%{foo}\" | match \"Connection\""); - assertMatchCommand("1:27:", "DROP", "from test | drop emp_no | match \"Anna\""); - assertMatchCommand("1:35:", "EVAL", "from test | eval n = emp_no * 3 | match \"Anna\""); - assertMatchCommand("1:44:", "GROK", "from test | grok last_name \"%{WORD:foo}\" | match \"Anna\""); - assertMatchCommand("1:27:", "KEEP", "from test | keep emp_no | match \"Anna\""); - - // TODO Keep adding tests for all unsupported commands - } - - private void assertMatchCommand(String lineAndColumn, String command, String query) { - String message; - Class exception; - var isSnapshot = Build.current().isSnapshot(); - if (isSnapshot) { - message = " MATCH cannot be used after "; - exception = VerificationException.class; - } 
else { - message = " mismatched input 'match' expecting "; - exception = ParsingException.class; - } - - var expectedErrorMessage = lineAndColumn + message + (isSnapshot ? command : ""); - assertThat(error(query, defaultAnalyzer, exception), containsString(expectedErrorMessage)); - } - public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { assumeTrue("skipping because QSTR is not enabled", EsqlCapabilities.Cap.QSTR_FUNCTION.isEnabled()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 2ed0093945837..c2779b7dbc46d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -609,101 +609,6 @@ public void testQueryStringFunctionMultipleQstrClauses() { assertThat(query.query().toString(), is(expected.toString())); } - /** - * Expecting - * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[],false] - * \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na - * me{f}#6, long_noidx{f}#11, salary{f}#7]] - * \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3] - * \_EsQueryExec[test], indexMode[standard], query[{"query_string":{"query":"\"last_name: Smith\"" - */ - public void testMatchCommand() { - assumeTrue("skipping because MATCH_COMMAND is not enabled", EsqlCapabilities.Cap.MATCH_COMMAND.isEnabled()); - var plan = plannerOptimizer.plan(""" - from test - | match "last_name: Smith" - """, IS_SV_STATS); - - var limit = as(plan, LimitExec.class); - var exchange = as(limit.child(), ExchangeExec.class); - var project = as(exchange.child(), ProjectExec.class); - var field = as(project.child(), FieldExtractExec.class); - var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); - var expected = QueryBuilders.queryStringQuery("last_name: Smith"); - assertThat(query.query().toString(), is(expected.toString())); - } - - /** - * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[],false] - * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n - * ame{f}#7, long_noidx{f}#12, salary{f}#8]] - * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen] - * \_EsQueryExec[test], indexMode[standard], - * query[{"bool":{ "must":[{ - * "esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}}}, - * {"query_string":{"query":"last_name: Smith","fields":[]}}],"boost":1.0} - * }] - */ - public void testMatchCommandWithWhereClause() { - assumeTrue("skipping because MATCH_COMMAND is not enabled", EsqlCapabilities.Cap.MATCH_COMMAND.isEnabled()); - String queryText = """ - from test - | where emp_no > 10010 - | match "last_name: Smith" - """; - var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); - - var limit = as(plan, LimitExec.class); - var exchange = as(limit.child(), ExchangeExec.class); - var project = as(exchange.child(), ProjectExec.class); - var field = as(project.child(), FieldExtractExec.class); - var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); - - Source source = new Source(2, 8, "emp_no > 10010"); - var range = 
wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", source); - var queryString = QueryBuilders.queryStringQuery("last_name: Smith"); - var expected = QueryBuilders.boolQuery().must(range).must(queryString); - assertThat(query.query().toString(), is(expected.toString())); - } - - /** - * TopNExec[[Order[emp_no{f}#3,ASC,LAST]],1000[INTEGER],0] - * \_ExchangeExec[[],false] - * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, languages{f}#6, last_n - * ame{f}#7, long_noidx{f}#12, salary{f}#8]] - * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen] - * \_EsQueryExec[test], - * query[{"bool":{"must":[{"query_string":{"query":"last_name: Smith","fields":[]}}, - * {"query_string":{"query":"John","fields":[]}}],"boost":1.0}}] - * sort[[FieldSort[field=emp_no{f}#3, direction=ASC, nulls=LAST]]] - */ - public void testMatchCommandWithMultipleMatches() { - assumeTrue("skipping because MATCH_COMMAND is not enabled", EsqlCapabilities.Cap.MATCH_COMMAND.isEnabled()); - var plan = plannerOptimizer.plan(""" - from test - | match "last_name: Smith" - | sort emp_no - | MATCH "John" - """, IS_SV_STATS); - - var limit = as(plan, TopNExec.class); - var exchange = as(limit.child(), ExchangeExec.class); - var project = as(exchange.child(), ProjectExec.class); - var field = as(project.child(), FieldExtractExec.class); - var query = as(field.child(), EsQueryExec.class); - assertThat(query.limit().fold(), is(1000)); - - Source source = new Source(2, 8, "emp_no > 10010"); - var queryString1 = QueryBuilders.queryStringQuery("last_name: Smith"); - var queryString2 = QueryBuilders.queryStringQuery("John"); - var expected = QueryBuilders.boolQuery().must(queryString1).must(queryString2); - assertThat(query.query().toString(), is(expected.toString())); - } - // optimizer doesn't know yet how to break down different multi count public void testCountFieldsAndAllWithFilter() { var plan = plannerOptimizer.plan(""" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 3ee7509ea1530..c5a5bfac023c1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -51,7 +50,6 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; -import java.io.IOException; import java.util.List; import java.util.Map; import java.util.function.Function; @@ -993,17 +991,6 @@ public void testInputParams() { assertThat(alias.child().fold(), is(11)); } - public void testMatchCommand() throws IOException { - assumeTrue("Match command available just for snapshots", Build.current().isSnapshot()); - String queryString = "field: value"; 
- assertEquals( - new Filter(EMPTY, PROCESSING_CMD_INPUT, new StringQueryPredicate(EMPTY, queryString, null)), - processingCommand("match \"" + queryString + "\"") - ); - - expectError("from a | match an unquoted string", "mismatched input 'an' expecting QUOTED_STRING"); - } - public void testMissingInputParams() { expectError("row x = ?, y = ?", List.of(new QueryParam(null, 1, INTEGER)), "Not enough actual parameters 1"); } diff --git a/x-pack/plugin/identity-provider/src/internalClusterTest/java/org/elasticsearch/xpack/idp/saml/test/IdentityProviderIntegTestCase.java b/x-pack/plugin/identity-provider/src/internalClusterTest/java/org/elasticsearch/xpack/idp/saml/test/IdentityProviderIntegTestCase.java index f02ccae7b8f29..60f95f2e56fd2 100644 --- a/x-pack/plugin/identity-provider/src/internalClusterTest/java/org/elasticsearch/xpack/idp/saml/test/IdentityProviderIntegTestCase.java +++ b/x-pack/plugin/identity-provider/src/internalClusterTest/java/org/elasticsearch/xpack/idp/saml/test/IdentityProviderIntegTestCase.java @@ -158,7 +158,7 @@ protected Function getClientWrapper() { // user. This is ok for internal n2n stuff but the test framework does other things like wiping indices, repositories, etc // that the system user cannot do. so we wrap the node client with a user that can do these things since the client() calls // return a node client - return client -> (client instanceof NodeClient) ? client.filterWithHeader(headers) : client; + return client -> asInstanceOf(NodeClient.class, client).filterWithHeader(headers); } @Override diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java index 68894baa8f3cb..28f97adec8814 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java @@ -311,7 +311,7 @@ public void testDeleteOnlyIndexInDataStreamDeletesDataStream() throws Exception @SuppressWarnings("unchecked") public void testDataStreamWithMultipleIndicesAndWriteIndexInDeletePhase() throws Exception { - createComposableTemplate(client(), template, dataStream + "*", new Template(null, null, null, null)); + createComposableTemplate(client(), template, dataStream + "*", Template.builder().build()); indexDocument(client(), dataStream, true); createNewSingletonPolicy(client(), policyName, "delete", DeleteAction.NO_SNAPSHOT_DELETE); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java index 4b59488e3707c..21924634ff6ab 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java @@ -1069,7 +1069,12 @@ static void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .template( + Template.builder() + .settings(settings) + .mappings(mappings == null ? 
null : CompressedXContent.fromJSON(mappings)) + .lifecycle(lifecycle) + ) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java index 283e48a328aa7..e06c7bc2708ca 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java @@ -695,12 +695,7 @@ static List migrateComposableTemplates(Metadata.Builder mb, ClusterState settingsBuilder.remove(requireRoutingSetting); settingsBuilder.remove(includeRoutingSetting); settingsBuilder.remove(excludeRoutingSetting); - Template migratedInnerTemplate = new Template( - settingsBuilder.build(), - currentInnerTemplate.mappings(), - currentInnerTemplate.aliases(), - currentInnerTemplate.lifecycle() - ); + Template migratedInnerTemplate = Template.builder(currentInnerTemplate).settings(settingsBuilder).build(); migratedComposableTemplateBuilder.indexPatterns(composableTemplate.indexPatterns()); migratedComposableTemplateBuilder.template(migratedInnerTemplate); @@ -741,12 +736,7 @@ static List migrateComponentTemplates(Metadata.Builder mb, ClusterState settingsBuilder.remove(requireRoutingSetting); settingsBuilder.remove(includeRoutingSetting); settingsBuilder.remove(excludeRoutingSetting); - Template migratedInnerTemplate = new Template( - settingsBuilder.build(), - currentInnerTemplate.mappings(), - currentInnerTemplate.aliases(), - currentInnerTemplate.lifecycle() - ); + Template migratedInnerTemplate = Template.builder(currentInnerTemplate).settings(settingsBuilder).build(); ComponentTemplate migratedComponentTemplate = new ComponentTemplate( migratedInnerTemplate, diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java index 7fb47e901f703..6c15b42dc65d5 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; @@ -19,11 +18,11 @@ import static org.hamcrest.Matchers.containsString; -// Tests disabled in CI due to the models being too large to download. Can be enabled (commented out) for local testing -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105198") +// This test was previously disabled in CI due to the models being too large +// See "https://github.com/elastic/elasticsearch/issues/105198". 
public class TextEmbeddingCrudIT extends InferenceBaseRestTest { - public void testPutE5Small_withNoModelVariant() throws IOException { + public void testPutE5Small_withNoModelVariant() { { String inferenceEntityId = randomAlphaOfLength(10).toLowerCase(); expectThrows( @@ -51,6 +50,7 @@ public void testPutE5Small_withPlatformAgnosticVariant() throws IOException { deleteTextEmbeddingModel(inferenceEntityId); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105198") public void testPutE5Small_withPlatformSpecificVariant() throws IOException { String inferenceEntityId = randomAlphaOfLength(10).toLowerCase(); if ("linux-x86_64".equals(Platforms.PLATFORM_NAME)) { @@ -124,7 +124,7 @@ private String noModelIdVariantJsonEntity() { private String platformAgnosticModelVariantJsonEntity() { return """ { - "service": "text_embedding", + "service": "elasticsearch", "service_settings": { "num_allocations": 1, "num_threads": 1, @@ -137,7 +137,7 @@ private String platformAgnosticModelVariantJsonEntity() { private String platformSpecificModelVariantJsonEntity() { return """ { - "service": "text_embedding", + "service": "elasticsearch", "service_settings": { "num_allocations": 1, "num_threads": 1, @@ -150,7 +150,7 @@ private String platformSpecificModelVariantJsonEntity() { private String fakeModelVariantJsonEntity() { return """ { - "service": "text_embedding", + "service": "elasticsearch", "service_settings": { "num_allocations": 1, "num_threads": 1, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 12a32ecdc6d4f..fd330a8cf6cc6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -9,6 +9,7 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; @@ -23,7 +24,8 @@ public class InferenceFeatures implements FeatureSpecification { public Set getFeatures() { return Set.of( TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, - RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED + RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, + SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 81dfba769136b..0483296cd2c6a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import 
org.elasticsearch.index.fielddata.FieldDataContext; @@ -79,6 +80,8 @@ * A {@link FieldMapper} for semantic text fields. */ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFieldMapper { + public static final NodeFeature SEMANTIC_TEXT_SEARCH_INFERENCE_ID = new NodeFeature("semantic_text.search_inference_id"); + public static final String CONTENT_TYPE = "semantic_text"; private final IndexSettings indexSettings; @@ -103,6 +106,13 @@ public static class Builder extends FieldMapper.Builder { } }); + private final Parameter searchInferenceId = Parameter.stringParam( + "search_inference_id", + true, + mapper -> ((SemanticTextFieldType) mapper.fieldType()).searchInferenceId, + null + ).acceptsNull(); + private final Parameter modelSettings = new Parameter<>( "model_settings", true, @@ -117,6 +127,17 @@ public static class Builder extends FieldMapper.Builder { private Function inferenceFieldBuilder; + public static Builder from(SemanticTextFieldMapper mapper) { + Builder builder = new Builder( + mapper.leafName(), + mapper.fieldType().indexVersionCreated, + mapper.fieldType().getChunksField().bitsetProducer(), + mapper.indexSettings + ); + builder.init(mapper); + return builder; + } + public Builder( String name, IndexVersion indexVersionCreated, @@ -140,6 +161,11 @@ public Builder setInferenceId(String id) { return this; } + public Builder setSearchInferenceId(String id) { + this.searchInferenceId.setValue(id); + return this; + } + public Builder setModelSettings(SemanticTextField.ModelSettings value) { this.modelSettings.setValue(value); return this; @@ -147,15 +173,17 @@ public Builder setModelSettings(SemanticTextField.ModelSettings value) { @Override protected Parameter[] getParameters() { - return new Parameter[] { inferenceId, modelSettings, meta }; + return new Parameter[] { inferenceId, searchInferenceId, modelSettings, meta }; } @Override protected void merge(FieldMapper mergeWith, Conflicts conflicts, MapperMergeContext mapperMergeContext) { - super.merge(mergeWith, conflicts, mapperMergeContext); + SemanticTextFieldMapper semanticMergeWith = (SemanticTextFieldMapper) mergeWith; + semanticMergeWith = copySettings(semanticMergeWith, mapperMergeContext); + + super.merge(semanticMergeWith, conflicts, mapperMergeContext); conflicts.check(); - var semanticMergeWith = (SemanticTextFieldMapper) mergeWith; - var context = mapperMergeContext.createChildContext(mergeWith.leafName(), ObjectMapper.Dynamic.FALSE); + var context = mapperMergeContext.createChildContext(semanticMergeWith.leafName(), ObjectMapper.Dynamic.FALSE); var inferenceField = inferenceFieldBuilder.apply(context.getMapperBuilderContext()); var mergedInferenceField = inferenceField.merge(semanticMergeWith.fieldType().getInferenceField(), context); inferenceFieldBuilder = c -> mergedInferenceField; @@ -181,6 +209,7 @@ public SemanticTextFieldMapper build(MapperBuilderContext context) { new SemanticTextFieldType( fullName, inferenceId.getValue(), + searchInferenceId.getValue(), modelSettings.getValue(), inferenceField, indexVersionCreated, @@ -190,6 +219,25 @@ public SemanticTextFieldMapper build(MapperBuilderContext context) { indexSettings ); } + + /** + * As necessary, copy settings from this builder to the passed-in mapper. + * Used to preserve {@link SemanticTextField.ModelSettings} when updating a semantic text mapping to one where the model settings + * are not specified. 
+ * + * @param mapper The mapper + * @return A mapper with the copied settings applied + */ + private SemanticTextFieldMapper copySettings(SemanticTextFieldMapper mapper, MapperMergeContext mapperMergeContext) { + SemanticTextFieldMapper returnedMapper = mapper; + if (mapper.fieldType().getModelSettings() == null) { + Builder builder = from(mapper); + builder.setModelSettings(modelSettings.getValue()); + returnedMapper = builder.build(mapperMergeContext.getMapperBuilderContext()); + } + + return returnedMapper; + } } private SemanticTextFieldMapper( @@ -211,9 +259,7 @@ public Iterator iterator() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName(), fieldType().indexVersionCreated, fieldType().getChunksField().bitsetProducer(), indexSettings).init( - this - ); + return Builder.from(this); } @Override @@ -267,7 +313,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } } else { Conflicts conflicts = new Conflicts(fullFieldName); - canMergeModelSettings(field.inference().modelSettings(), fieldType().getModelSettings(), conflicts); + canMergeModelSettings(fieldType().getModelSettings(), field.inference().modelSettings(), conflicts); try { conflicts.check(); } catch (Exception exc) { @@ -316,7 +362,7 @@ public InferenceFieldMetadata getMetadata(Set sourcePaths) { String[] copyFields = sourcePaths.toArray(String[]::new); // ensure consistent order Arrays.sort(copyFields); - return new InferenceFieldMetadata(fullPath(), fieldType().inferenceId, copyFields); + return new InferenceFieldMetadata(fullPath(), fieldType().getInferenceId(), fieldType().getSearchInferenceId(), copyFields); } @Override @@ -335,6 +381,7 @@ public Object getOriginalValue(Map sourceAsMap) { public static class SemanticTextFieldType extends SimpleMappedFieldType { private final String inferenceId; + private final String searchInferenceId; private final SemanticTextField.ModelSettings modelSettings; private final ObjectMapper inferenceField; private final IndexVersion indexVersionCreated; @@ -342,6 +389,7 @@ public static class SemanticTextFieldType extends SimpleMappedFieldType { public SemanticTextFieldType( String name, String inferenceId, + String searchInferenceId, SemanticTextField.ModelSettings modelSettings, ObjectMapper inferenceField, IndexVersion indexVersionCreated, @@ -349,6 +397,7 @@ public SemanticTextFieldType( ) { super(name, true, false, false, TextSearchInfo.NONE, meta); this.inferenceId = inferenceId; + this.searchInferenceId = searchInferenceId; this.modelSettings = modelSettings; this.inferenceField = inferenceField; this.indexVersionCreated = indexVersionCreated; @@ -363,6 +412,10 @@ public String getInferenceId() { return inferenceId; } + public String getSearchInferenceId() { + return searchInferenceId == null ? inferenceId : searchInferenceId; + } + public SemanticTextField.ModelSettings getModelSettings() { return modelSettings; } @@ -428,14 +481,7 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost case SPARSE_EMBEDDING -> { if (inferenceResults instanceof TextExpansionResults == false) { throw new IllegalArgumentException( - "Field [" - + name() - + "] expected query inference results to be of type [" - + TextExpansionResults.NAME - + "]," - + " got [" - + inferenceResults.getWriteableName() - + "]. Has the inference endpoint configuration changed?" 
+ generateQueryInferenceResultsTypeMismatchMessage(inferenceResults, TextExpansionResults.NAME) ); } @@ -454,14 +500,7 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost case TEXT_EMBEDDING -> { if (inferenceResults instanceof MlTextEmbeddingResults == false) { throw new IllegalArgumentException( - "Field [" - + name() - + "] expected query inference results to be of type [" - + MlTextEmbeddingResults.NAME - + "]," - + " got [" - + inferenceResults.getWriteableName() - + "]. Has the inference endpoint configuration changed?" + generateQueryInferenceResultsTypeMismatchMessage(inferenceResults, MlTextEmbeddingResults.NAME) ); } @@ -469,13 +508,7 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost float[] inference = textEmbeddingResults.getInferenceAsFloat(); if (inference.length != modelSettings.dimensions()) { throw new IllegalArgumentException( - "Field [" - + name() - + "] expected query inference results with " - + modelSettings.dimensions() - + " dimensions, got " - + inference.length - + " dimensions. Has the inference endpoint configuration changed?" + generateDimensionCountMismatchMessage(inference.length, modelSettings.dimensions()) ); } @@ -484,7 +517,7 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost default -> throw new IllegalStateException( "Field [" + name() - + "] configured to use an inference endpoint with an unsupported task type [" + + "] is configured to use an inference endpoint with an unsupported task type [" + modelSettings.taskType() + "]" ); @@ -493,6 +526,51 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost return new NestedQueryBuilder(nestedFieldPath, childQueryBuilder, ScoreMode.Max).boost(boost).queryName(queryName); } + + private String generateQueryInferenceResultsTypeMismatchMessage(InferenceResults inferenceResults, String expectedResultsType) { + StringBuilder sb = new StringBuilder( + "Field [" + + name() + + "] expected query inference results to be of type [" + + expectedResultsType + + "]," + + " got [" + + inferenceResults.getWriteableName() + + "]." + ); + + return generateInvalidQueryInferenceResultsMessage(sb); + } + + private String generateDimensionCountMismatchMessage(int inferenceDimCount, int expectedDimCount) { + StringBuilder sb = new StringBuilder( + "Field [" + + name() + + "] expected query inference results with " + + expectedDimCount + + " dimensions, got " + + inferenceDimCount + + " dimensions." + ); + + return generateInvalidQueryInferenceResultsMessage(sb); + } + + private String generateInvalidQueryInferenceResultsMessage(StringBuilder baseMessageBuilder) { + if (searchInferenceId != null && searchInferenceId.equals(inferenceId) == false) { + baseMessageBuilder.append( + " Is the search inference endpoint [" + + searchInferenceId + + "] compatible with the inference endpoint [" + + inferenceId + + "]?" 
+ ); + } else { + baseMessageBuilder.append(" Has the configuration for inference endpoint [" + inferenceId + "] changed?"); + } + + return baseMessageBuilder.toString(); + } } private static ObjectMapper createInferenceField( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 7f21f94d33276..9f7fcb1ef407c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -284,7 +284,7 @@ private static String getInferenceIdForForField(Collection indexM String inferenceId = null; for (IndexMetadata indexMetadata : indexMetadataCollection) { InferenceFieldMetadata inferenceFieldMetadata = indexMetadata.getInferenceFields().get(fieldName); - String indexInferenceId = inferenceFieldMetadata != null ? inferenceFieldMetadata.getInferenceId() : null; + String indexInferenceId = inferenceFieldMetadata != null ? inferenceFieldMetadata.getSearchInferenceId() : null; if (indexInferenceId != null) { if (inferenceId != null && inferenceId.equals(indexInferenceId) == false) { throw new IllegalArgumentException("Field [" + fieldName + "] has multiple inference IDs associated with it"); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index bb0691c691176..1697b33fedd92 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.join.QueryBitSetProducer; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -140,6 +141,7 @@ public MappedFieldType getMappedFieldType() { "fake-inference-id", null, null, + null, IndexVersion.current(), Map.of() ); @@ -210,13 +212,28 @@ public void testUpdatesToInferenceIdNotSupported() throws IOException { public void testDynamicUpdate() throws IOException { final String fieldName = "semantic"; final String inferenceId = "test_service"; + final String searchInferenceId = "search_test_service"; - MapperService mapperService = mapperServiceForFieldWithModelSettings( - fieldName, - inferenceId, - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) - ); - assertSemanticTextField(mapperService, fieldName, true); + { + MapperService mapperService = mapperServiceForFieldWithModelSettings( + fieldName, + inferenceId, + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) + ); + assertSemanticTextField(mapperService, fieldName, true); + assertSearchInferenceId(mapperService, fieldName, inferenceId); + } + + { + MapperService mapperService = mapperServiceForFieldWithModelSettings( + fieldName, + inferenceId, + searchInferenceId, + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, 
null, null, null) + ); + assertSemanticTextField(mapperService, fieldName, true); + assertSearchInferenceId(mapperService, fieldName, searchInferenceId); + } } public void testUpdateModelSettings() throws IOException { @@ -260,19 +277,11 @@ public void testUpdateModelSettings() throws IOException { assertSemanticTextField(mapperService, fieldName, true); } { - Exception exc = expectThrows( - IllegalArgumentException.class, - () -> merge( - mapperService, - mapping( - b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject() - ) - ) - ); - assertThat( - exc.getMessage(), - containsString("Cannot update parameter [model_settings] " + "from [task_type=sparse_embedding] to [null]") + merge( + mapperService, + mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject()) ); + assertSemanticTextField(mapperService, fieldName, true); } { Exception exc = expectThrows( @@ -305,7 +314,60 @@ public void testUpdateModelSettings() throws IOException { } } - static void assertSemanticTextField(MapperService mapperService, String fieldName, boolean expectedModelSettings) { + public void testUpdateSearchInferenceId() throws IOException { + final String inferenceId = "test_inference_id"; + final String searchInferenceId1 = "test_search_inference_id_1"; + final String searchInferenceId2 = "test_search_inference_id_2"; + + CheckedBiFunction buildMapping = (f, sid) -> mapping(b -> { + b.startObject(f).field("type", "semantic_text").field("inference_id", inferenceId); + if (sid != null) { + b.field("search_inference_id", sid); + } + b.endObject(); + }); + + for (int depth = 1; depth < 5; depth++) { + String fieldName = randomFieldName(depth); + MapperService mapperService = createMapperService(buildMapping.apply(fieldName, null)); + assertSemanticTextField(mapperService, fieldName, false); + assertSearchInferenceId(mapperService, fieldName, inferenceId); + + merge(mapperService, buildMapping.apply(fieldName, searchInferenceId1)); + assertSemanticTextField(mapperService, fieldName, false); + assertSearchInferenceId(mapperService, fieldName, searchInferenceId1); + + merge(mapperService, buildMapping.apply(fieldName, searchInferenceId2)); + assertSemanticTextField(mapperService, fieldName, false); + assertSearchInferenceId(mapperService, fieldName, searchInferenceId2); + + merge(mapperService, buildMapping.apply(fieldName, null)); + assertSemanticTextField(mapperService, fieldName, false); + assertSearchInferenceId(mapperService, fieldName, inferenceId); + + mapperService = mapperServiceForFieldWithModelSettings( + fieldName, + inferenceId, + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) + ); + assertSemanticTextField(mapperService, fieldName, true); + assertSearchInferenceId(mapperService, fieldName, inferenceId); + + merge(mapperService, buildMapping.apply(fieldName, searchInferenceId1)); + assertSemanticTextField(mapperService, fieldName, true); + assertSearchInferenceId(mapperService, fieldName, searchInferenceId1); + + merge(mapperService, buildMapping.apply(fieldName, searchInferenceId2)); + assertSemanticTextField(mapperService, fieldName, true); + assertSearchInferenceId(mapperService, fieldName, searchInferenceId2); + + merge(mapperService, buildMapping.apply(fieldName, null)); + assertSemanticTextField(mapperService, fieldName, true); + assertSearchInferenceId(mapperService, fieldName, inferenceId); + } + } + + private static void 
assertSemanticTextField(MapperService mapperService, String fieldName, boolean expectedModelSettings) { Mapper mapper = mapperService.mappingLookup().getMapper(fieldName); assertNotNull(mapper); assertThat(mapper, instanceOf(SemanticTextFieldMapper.class)); @@ -347,21 +409,34 @@ static void assertSemanticTextField(MapperService mapperService, String fieldNam } } + private static void assertSearchInferenceId(MapperService mapperService, String fieldName, String expectedSearchInferenceId) { + var fieldType = mapperService.fieldType(fieldName); + assertNotNull(fieldType); + assertThat(fieldType, instanceOf(SemanticTextFieldMapper.SemanticTextFieldType.class)); + SemanticTextFieldMapper.SemanticTextFieldType semanticTextFieldType = (SemanticTextFieldMapper.SemanticTextFieldType) fieldType; + assertEquals(expectedSearchInferenceId, semanticTextFieldType.getSearchInferenceId()); + } + public void testSuccessfulParse() throws IOException { for (int depth = 1; depth < 4; depth++) { final String fieldName1 = randomFieldName(depth); final String fieldName2 = randomFieldName(depth + 1); + final String searchInferenceId = randomAlphaOfLength(8); + final boolean setSearchInferenceId = randomBoolean(); Model model1 = TestModel.createRandomInstance(TaskType.SPARSE_EMBEDDING); Model model2 = TestModel.createRandomInstance(TaskType.SPARSE_EMBEDDING); XContentBuilder mapping = mapping(b -> { - addSemanticTextMapping(b, fieldName1, model1.getInferenceEntityId()); - addSemanticTextMapping(b, fieldName2, model2.getInferenceEntityId()); + addSemanticTextMapping(b, fieldName1, model1.getInferenceEntityId(), setSearchInferenceId ? searchInferenceId : null); + addSemanticTextMapping(b, fieldName2, model2.getInferenceEntityId(), setSearchInferenceId ? searchInferenceId : null); }); MapperService mapperService = createMapperService(mapping); - SemanticTextFieldMapperTests.assertSemanticTextField(mapperService, fieldName1, false); - SemanticTextFieldMapperTests.assertSemanticTextField(mapperService, fieldName2, false); + assertSemanticTextField(mapperService, fieldName1, false); + assertSearchInferenceId(mapperService, fieldName1, setSearchInferenceId ? searchInferenceId : model1.getInferenceEntityId()); + assertSemanticTextField(mapperService, fieldName2, false); + assertSearchInferenceId(mapperService, fieldName2, setSearchInferenceId ? 
searchInferenceId : model2.getInferenceEntityId()); + DocumentMapper documentMapper = mapperService.documentMapper(); ParsedDocument doc = documentMapper.parse( source( @@ -449,7 +524,7 @@ public void testSuccessfulParse() throws IOException { } public void testMissingInferenceId() throws IOException { - DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id"))); + DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null))); IllegalArgumentException ex = expectThrows( DocumentParsingException.class, IllegalArgumentException.class, @@ -468,7 +543,7 @@ public void testMissingInferenceId() throws IOException { } public void testMissingModelSettings() throws IOException { - DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id"))); + DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null))); IllegalArgumentException ex = expectThrows( DocumentParsingException.class, IllegalArgumentException.class, @@ -480,7 +555,7 @@ public void testMissingModelSettings() throws IOException { } public void testMissingTaskType() throws IOException { - DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id"))); + DocumentMapper documentMapper = createDocumentMapper(mapping(b -> addSemanticTextMapping(b, "field", "my_id", null))); IllegalArgumentException ex = expectThrows( DocumentParsingException.class, IllegalArgumentException.class, @@ -540,12 +615,24 @@ private MapperService mapperServiceForFieldWithModelSettings( String inferenceId, SemanticTextField.ModelSettings modelSettings ) throws IOException { + return mapperServiceForFieldWithModelSettings(fieldName, inferenceId, null, modelSettings); + } + + private MapperService mapperServiceForFieldWithModelSettings( + String fieldName, + String inferenceId, + String searchInferenceId, + SemanticTextField.ModelSettings modelSettings + ) throws IOException { + String mappingParams = "type=semantic_text,inference_id=" + inferenceId; + if (searchInferenceId != null) { + mappingParams += ",search_inference_id=" + searchInferenceId; + } + MapperService mapperService = createMapperService(mapping(b -> {})); mapperService.merge( "_doc", - new CompressedXContent( - Strings.toString(PutMappingRequest.simpleMapping(fieldName, "type=semantic_text,inference_id=" + inferenceId)) - ), + new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(fieldName, mappingParams))), MapperService.MergeReason.MAPPING_UPDATE ); @@ -615,10 +702,18 @@ protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneD assertThat(query, instanceOf(MatchNoDocsQuery.class)); } - private static void addSemanticTextMapping(XContentBuilder mappingBuilder, String fieldName, String modelId) throws IOException { + private static void addSemanticTextMapping( + XContentBuilder mappingBuilder, + String fieldName, + String inferenceId, + String searchInferenceId + ) throws IOException { mappingBuilder.startObject(fieldName); mappingBuilder.field("type", SemanticTextFieldMapper.CONTENT_TYPE); - mappingBuilder.field("inference_id", modelId); + mappingBuilder.field("inference_id", inferenceId); + if (searchInferenceId != null) { + mappingBuilder.field("search_inference_id", searchInferenceId); + } mappingBuilder.endObject(); } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index c2b99923bae61..f54ce89183079 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -79,9 +79,11 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase randomFrom(DenseVectorFieldMapper.ElementType.values()) ); // TODO: Support bit elements once KNN bit vector queries are available + useSearchInferenceId = randomBoolean(); } @Override @@ -126,11 +129,14 @@ protected Settings createTestIndexSettings() { @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + String mappingConfig = "type=semantic_text,inference_id=" + INFERENCE_ID; + if (useSearchInferenceId) { + mappingConfig += ",search_inference_id=" + SEARCH_INFERENCE_ID; + } + mapperService.merge( "_doc", - new CompressedXContent( - Strings.toString(PutMappingRequest.simpleMapping(SEMANTIC_TEXT_FIELD, "type=semantic_text,inference_id=" + INFERENCE_ID)) - ), + new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(SEMANTIC_TEXT_FIELD, mappingConfig))), MapperService.MergeReason.MAPPING_UPDATE ); @@ -244,6 +250,7 @@ protected Object simulateMethod(Method method, Object[] args) { InferenceAction.Request request = (InferenceAction.Request) args[1]; assertThat(request.getTaskType(), equalTo(TaskType.ANY)); assertThat(request.getInputType(), equalTo(InputType.SEARCH)); + assertThat(request.getInferenceEntityId(), equalTo(useSearchInferenceId ? 
SEARCH_INFERENCE_ID : INFERENCE_ID)); List input = request.getInput(); assertThat(input.size(), equalTo(1)); diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 932ee4854f445..2070b3752791a 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -18,6 +18,21 @@ setup: } } + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id-2 + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + - do: inference.put: task_type: text_embedding @@ -35,6 +50,23 @@ setup: } } + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id-2 + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "api_key": "abc64", + "similarity": "COSINE" + }, + "task_settings": { + } + } + - do: indices.create: index: test-sparse-index @@ -142,6 +174,51 @@ setup: - match: { hits.hits.0._id: "doc_1" } - length: { hits.hits.0._source.inference_field.inference.chunks: 1 } +--- +"Query using a sparse embedding model via a search inference ID": + - requires: + cluster_features: "semantic_text.search_inference_id" + reason: search_inference_id introduced in 8.16.0 + + - skip: + features: [ "headers", "close_to" ] + + - do: + indices.put_mapping: + index: test-sparse-index + body: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + search_inference_id: sparse-inference-id-2 + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } + --- "Query using a dense embedding model": - skip: @@ -286,6 +363,51 @@ setup: - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } +--- +"Query using a dense embedding model via a search inference ID": + - requires: + cluster_features: "semantic_text.search_inference_id" + reason: search_inference_id introduced in 8.16.0 + + - skip: + features: [ "headers", "close_to" ] + + - do: + indices.put_mapping: + index: test-dense-index + body: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + search_inference_id: dense-inference-id-2 + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: ["inference test", "another inference test"] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + 
Content-Type: application/json + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } + --- "Apply boost and query name": - skip: @@ -581,3 +703,139 @@ setup: - match: { error.type: "resource_not_found_exception" } - match: { error.reason: "Inference endpoint not found [invalid-inference-id]" } + +--- +"Query a field with a search inference ID that uses the wrong task type": + - requires: + cluster_features: "semantic_text.search_inference_id" + reason: search_inference_id introduced in 8.16.0 + + - do: + indices.put_mapping: + index: test-sparse-index + body: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + search_inference_id: dense-inference-id + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + catch: bad_request + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + + - match: { error.caused_by.type: "illegal_argument_exception" } + - match: { error.caused_by.reason: "Field [inference_field] expected query inference results to be of type + [text_expansion_result], got [text_embedding_result]. Is the search inference + endpoint [dense-inference-id] compatible with the inference endpoint + [sparse-inference-id]?" } + +--- +"Query a field with a search inference ID that uses the wrong dimension count": + - requires: + cluster_features: "semantic_text.search_inference_id" + reason: search_inference_id introduced in 8.16.0 + + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id-20-dims + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 20, + "api_key": "abc64", + "similarity": "COSINE" + }, + "task_settings": { + } + } + + - do: + indices.put_mapping: + index: test-dense-index + body: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + search_inference_id: dense-inference-id-20-dims + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: ["inference test", "another inference test"] + non_inference_field: "non inference test" + refresh: true + + - do: + catch: bad_request + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + + - match: { error.caused_by.type: "illegal_argument_exception" } + - match: { error.caused_by.reason: "Field [inference_field] expected query inference results with 10 dimensions, got + 20 dimensions. Is the search inference endpoint [dense-inference-id-20-dims] + compatible with the inference endpoint [dense-inference-id]?" 
} + +--- +"Query a field with an invalid search inference ID": + - requires: + cluster_features: "semantic_text.search_inference_id" + reason: search_inference_id introduced in 8.16.0 + + - do: + indices.put_mapping: + index: test-dense-index + body: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + search_inference_id: invalid-inference-id + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + catch: missing + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + + - match: { error.type: "resource_not_found_exception" } + - match: { error.reason: "Inference endpoint not found [invalid-inference-id]" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml index f6a7073914609..51595d40737a3 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml @@ -112,8 +112,8 @@ setup: - match: { error.caused_by.type: "illegal_argument_exception" } - match: { error.caused_by.reason: "Field [inference_field] expected query inference results to be of type - [text_expansion_result], got [text_embedding_result]. Has the inference endpoint - configuration changed?" } + [text_expansion_result], got [text_embedding_result]. Has the configuration for + inference endpoint [sparse-inference-id] changed?" } --- "text_embedding changed to sparse_embedding": @@ -149,8 +149,8 @@ setup: - match: { error.caused_by.type: "illegal_argument_exception" } - match: { error.caused_by.reason: "Field [inference_field] expected query inference results to be of type - [text_embedding_result], got [text_expansion_result]. Has the inference endpoint - configuration changed?" } + [text_embedding_result], got [text_expansion_result]. Has the configuration for + inference endpoint [dense-inference-id] changed?" } --- "text_embedding dimension count changed": @@ -188,4 +188,5 @@ setup: - match: { error.caused_by.type: "illegal_argument_exception" } - match: { error.caused_by.reason: "Field [inference_field] expected query inference results with 10 dimensions, got - 20 dimensions. Has the inference endpoint configuration changed?" } + 20 dimensions. Has the configuration for inference endpoint [dense-inference-id] + changed?" } diff --git a/x-pack/plugin/logsdb/build.gradle b/x-pack/plugin/logsdb/build.gradle new file mode 100644 index 0000000000000..5b7e45a90149d --- /dev/null +++ b/x-pack/plugin/logsdb/build.gradle @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import org.elasticsearch.gradle.internal.info.BuildParams + +evaluationDependsOn(xpackModule('core')) + +apply plugin: 'elasticsearch.internal-es-plugin' +apply plugin: 'elasticsearch.internal-java-rest-test' + +esplugin { + name 'logsdb' + description 'A plugin for logsdb related functionality' + classname 'org.elasticsearch.xpack.logsdb.LogsDBPlugin' + extendedPlugins = ['x-pack-core'] +} +base { + archivesName = 'x-pack-logsdb' +} + +dependencies { + compileOnly project(path: xpackModule('core')) + testImplementation(testArtifact(project(xpackModule('core')))) +} + +tasks.named("javaRestTest").configure { + usesDefaultDistribution() +} diff --git a/x-pack/plugin/logsdb/qa/build.gradle b/x-pack/plugin/logsdb/qa/build.gradle new file mode 100644 index 0000000000000..0f98e90b4d52e --- /dev/null +++ b/x-pack/plugin/logsdb/qa/build.gradle @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + diff --git a/x-pack/plugin/logsdb/qa/with-basic/build.gradle b/x-pack/plugin/logsdb/qa/with-basic/build.gradle new file mode 100644 index 0000000000000..2fdeed338e1c1 --- /dev/null +++ b/x-pack/plugin/logsdb/qa/with-basic/build.gradle @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import org.elasticsearch.gradle.internal.info.BuildParams + +apply plugin: 'elasticsearch.internal-java-rest-test' + +dependencies { + javaRestTestImplementation(testArtifact(project(xpackModule('core')))) +} + +tasks.named("javaRestTest").configure { + // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC + BuildParams.withFipsEnabledOnly(it) + + usesDefaultDistribution() +} diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java new file mode 100644 index 0000000000000..e7d267810424c --- /dev/null +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.hamcrest.Matchers; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class LogsdbRestIT extends ESRestTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "basic") + .setting("xpack.security.enabled", "false") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testFeatureUsageWithLogsdbIndex() throws IOException { + { + var response = getAsMap("/_license/feature_usage"); + @SuppressWarnings("unchecked") + List> features = (List>) response.get("features"); + assertThat(features, Matchers.empty()); + } + { + createIndex("test-index", Settings.builder().put("index.mode", "logsdb").build()); + var response = getAsMap("/_license/feature_usage"); + @SuppressWarnings("unchecked") + List> features = (List>) response.get("features"); + assertThat(features, Matchers.empty()); + } + } + +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java new file mode 100644 index 0000000000000..efff6d0579838 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.hamcrest.Matchers; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class LogsdbRestIT extends ESRestTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testFeatureUsageWithLogsdbIndex() throws IOException { + { + var response = getAsMap("/_license/feature_usage"); + @SuppressWarnings("unchecked") + List> features = (List>) response.get("features"); + assertThat(features, Matchers.empty()); + } + { + createIndex("test-index", Settings.builder().put("index.mode", "logsdb").build()); + var response = getAsMap("/_license/feature_usage"); + @SuppressWarnings("unchecked") + List> features = (List>) response.get("features"); + logger.info("response's features: {}", features); + assertThat(features, Matchers.not(Matchers.empty())); + Map feature = features.stream().filter(map -> "mappings".equals(map.get("family"))).findFirst().get(); + assertThat(feature.get("name"), equalTo("synthetic-source")); + assertThat(feature.get("license_level"), equalTo("enterprise")); + } + } + +} diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java new file mode 100644 index 0000000000000..e38f953be96a3 --- /dev/null +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.core.XPackPlugin; + +import java.util.Collection; +import java.util.List; + +import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseService.FALLBACK_SETTING; + +public class LogsDBPlugin extends Plugin { + + private final Settings settings; + private final SyntheticSourceLicenseService licenseService; + + public LogsDBPlugin(Settings settings) { + this.settings = settings; + this.licenseService = new SyntheticSourceLicenseService(settings); + } + + @Override + public Collection createComponents(PluginServices services) { + licenseService.setLicenseState(XPackPlugin.getSharedLicenseState()); + var clusterSettings = services.clusterService().getClusterSettings(); + clusterSettings.addSettingsUpdateConsumer(FALLBACK_SETTING, licenseService::setSyntheticSourceFallback); + // Nothing to share here: + return super.createComponents(services); + } + + @Override + public Collection getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) { + if (DiscoveryNode.isStateless(settings)) { + return List.of(); + } + return List.of(new SyntheticSourceIndexSettingsProvider(licenseService)); + } + + @Override + public List> getSettings() { + return List.of(FALLBACK_SETTING); + } +} diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java new file mode 100644 index 0000000000000..5b7792de0622a --- /dev/null +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.index.IndexSettings; + +import java.time.Instant; +import java.util.List; +import java.util.Locale; + +/** + * An index setting provider that overwrites the source mode from synthetic to stored if synthetic source isn't allowed to be used. 
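+ * Note: at the moment the provider only detects and logs the fallback case; actually overriding the source mode is left as a TODO, and {@code Settings.EMPTY} is always returned.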
+ */ +public class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider { + + private static final Logger LOGGER = LogManager.getLogger(SyntheticSourceIndexSettingsProvider.class); + + private final SyntheticSourceLicenseService syntheticSourceLicenseService; + + public SyntheticSourceIndexSettingsProvider(SyntheticSourceLicenseService syntheticSourceLicenseService) { + this.syntheticSourceLicenseService = syntheticSourceLicenseService; + } + + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + boolean isTimeSeries, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + if (newIndexHasSyntheticSourceUsage(indexTemplateAndCreateRequestSettings) + && syntheticSourceLicenseService.fallbackToStoredSource()) { + LOGGER.debug("creation of index [{}] with synthetic source without it being allowed", indexName); + // TODO: handle falling back to stored source + } + return Settings.EMPTY; + } + + boolean newIndexHasSyntheticSourceUsage(Settings indexTemplateAndCreateRequestSettings) { + // TODO: build tmp MapperService and check whether SourceFieldMapper#isSynthetic() to determine synthetic source usage. + // Not using IndexSettings.MODE.get() to avoid validation that may fail at this point. + var rawIndexMode = indexTemplateAndCreateRequestSettings.get(IndexSettings.MODE.getKey()); + IndexMode indexMode = rawIndexMode != null ? Enum.valueOf(IndexMode.class, rawIndexMode.toUpperCase(Locale.ROOT)) : null; + return indexMode != null && indexMode.isSyntheticSourceEnabled(); + } +} diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java new file mode 100644 index 0000000000000..4e3e916762fab --- /dev/null +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.XPackLicenseState; + +/** + * Determines, based on the license and the fallback setting, whether synthetic source usage should fall back to stored source. + */ +public final class SyntheticSourceLicenseService { + + private static final String MAPPINGS_FEATURE_FAMILY = "mappings"; + + /** + * A setting that determines whether the source mode should always be stored source, regardless of license.
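+ * When enabled, {@link #fallbackToStoredSource()} reports a fallback without consulting the license state.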
+ */ + public static final Setting FALLBACK_SETTING = Setting.boolSetting( + "xpack.mapping.synthetic_source_fallback_to_stored_source", + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final LicensedFeature.Momentary SYNTHETIC_SOURCE_FEATURE = LicensedFeature.momentary( + MAPPINGS_FEATURE_FAMILY, + "synthetic-source", + License.OperationMode.ENTERPRISE + ); + + private XPackLicenseState licenseState; + private volatile boolean syntheticSourceFallback; + + public SyntheticSourceLicenseService(Settings settings) { + syntheticSourceFallback = FALLBACK_SETTING.get(settings); + } + + /** + * @return whether synthetic source mode should fall back to stored source. + */ + public boolean fallbackToStoredSource() { + if (syntheticSourceFallback) { + return true; + } + + return SYNTHETIC_SOURCE_FEATURE.check(licenseState) == false; + } + + void setSyntheticSourceFallback(boolean syntheticSourceFallback) { + this.syntheticSourceFallback = syntheticSourceFallback; + } + + void setLicenseState(XPackLicenseState licenseState) { + this.licenseState = licenseState; + } +} diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java new file mode 100644 index 0000000000000..2ca3a8d57f2eb --- /dev/null +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.MockLicenseState; +import org.elasticsearch.test.ESTestCase; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class SyntheticSourceLicenseServiceTests extends ESTestCase { + + public void testLicenseAllowsSyntheticSource() { + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(any())).thenReturn(true); + var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + licenseService.setLicenseState(licenseState); + assertFalse("synthetic source is allowed, so not fallback to stored source", licenseService.fallbackToStoredSource()); + } + + public void testDefaultDisallow() { + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(any())).thenReturn(false); + var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + licenseService.setLicenseState(licenseState); + assertTrue("synthetic source is not allowed, so fallback to stored source", licenseService.fallbackToStoredSource()); + } + + public void testFallback() { + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(any())).thenReturn(true); + var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY); + licenseService.setLicenseState(licenseState); + licenseService.setSyntheticSourceFallback(true); + assertTrue( + "synthetic source is allowed, but fallback has been enabled, so fallback to stored source", + licenseService.fallbackToStoredSource() + ); + } + +} diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index e927c46e6bd29..a63d911e9d40d 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -15,12 +15,17 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ExecutorBuilder; +import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.xpack.core.ml.packageloader.action.GetTrainedModelPackageConfigAction; import org.elasticsearch.xpack.core.ml.packageloader.action.LoadTrainedModelPackageAction; import org.elasticsearch.xpack.ml.packageloader.action.ModelDownloadTask; +import org.elasticsearch.xpack.ml.packageloader.action.ModelImporter; import org.elasticsearch.xpack.ml.packageloader.action.TransportGetTrainedModelPackageConfigAction; import org.elasticsearch.xpack.ml.packageloader.action.TransportLoadTrainedModelPackage; @@ -44,9 +49,6 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin Setting.Property.Dynamic ); - // re-using thread pool setup by the ml plugin - public static final String UTILITY_THREAD_POOL_NAME = 
"ml_utility"; - // This link will be invalid for serverless, but serverless will never be // air-gapped, so this message should never be needed. private static final String MODEL_REPOSITORY_DOCUMENTATION_LINK = format( @@ -54,6 +56,8 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin Build.current().version().replaceFirst("^(\\d+\\.\\d+).*", "$1") ); + public static final String MODEL_DOWNLOAD_THREADPOOL_NAME = "model_download"; + public MachineLearningPackageLoader() {} @Override @@ -81,6 +85,24 @@ public List getNamedWriteables() { ); } + @Override + public List> getExecutorBuilders(Settings settings) { + return List.of(modelDownloadExecutor(settings)); + } + + public static FixedExecutorBuilder modelDownloadExecutor(Settings settings) { + // Threadpool with a fixed number of threads for + // downloading the model definition files + return new FixedExecutorBuilder( + settings, + MODEL_DOWNLOAD_THREADPOOL_NAME, + ModelImporter.NUMBER_OF_STREAMS, + -1, // unbounded queue size + "xpack.ml.model_download_thread_pool", + EsExecutors.TaskTrackingConfig.DO_NOT_TRACK + ); + } + @Override public List getBootstrapChecks() { return List.of(new BootstrapCheck() { diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 33d5d5982d2b0..b155d6c73ccef 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -10,124 +10,265 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; +import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader; -import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.core.Strings.format; /** - * A helper class for abstracting out the use of the ModelLoaderUtils to make dependency injection testing easier. 
+ * Downloads the vocabulary and model definition files and + * indexes those files in Elasticsearch. + * Holding the large model definition file in memory would consume + * too much memory, so instead it is streamed in chunks, with each chunk + * written to the index in a non-blocking request. + * The model files may be installed from a local file or downloaded + * from a server. The server download uses {@link #NUMBER_OF_STREAMS} + * connections, each using the Range header to split the stream by byte + * range. There is a complication in that the final part of the model + * definition must be uploaded last, as writing this part causes an index + * refresh. + * When reading from a file, a single thread is used to read the file + * stream, split it into chunks and index those chunks. */ -class ModelImporter { +public class ModelImporter { private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB + public static final int NUMBER_OF_STREAMS = 5; private static final Logger logger = LogManager.getLogger(ModelImporter.class); private final Client client; private final String modelId; private final ModelPackageConfig config; private final ModelDownloadTask task; + private final ExecutorService executorService; + private final AtomicInteger progressCounter = new AtomicInteger(); + private final URI uri; + private final CircuitBreakerService breakerService; - ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task) { + ModelImporter( + Client client, + String modelId, + ModelPackageConfig packageConfig, + ModelDownloadTask task, + ThreadPool threadPool, + CircuitBreakerService cbs + ) throws URISyntaxException { this.client = client; this.modelId = Objects.requireNonNull(modelId); this.config = Objects.requireNonNull(packageConfig); this.task = Objects.requireNonNull(task); + this.executorService = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME); + this.uri = ModelLoaderUtils.resolvePackageLocation( + config.getModelRepository(), + config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION + ); + this.breakerService = cbs; } - public void doImport() throws URISyntaxException, IOException, ElasticsearchStatusException { - long size = config.getSize(); + public void doImport(ActionListener listener) { + executorService.execute(() -> doImportInternal(listener)); + } - // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the - // download is complete - if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) { - uploadVocabulary(); + private void doImportInternal(ActionListener finalListener) { + assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) + : format( + "Model download must execute from [%s] but thread is [%s]", + MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME, + Thread.currentThread().getName() + ); - logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); - } + ModelLoaderUtils.VocabularyParts vocabularyParts = null; + try { + if (config.getVocabularyFile() != null) { + vocabularyParts = ModelLoaderUtils.loadVocabulary( + ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile()) + ); + } - URI uri = ModelLoaderUtils.resolvePackageLocation( - config.getModelRepository(), - config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION - ); + // simple round up + int totalParts =
(int) ((config.getSize() + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE); - InputStream modelInputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri); + if (ModelLoaderUtils.uriIsFile(uri) == false) { + breakerService.getBreaker(CircuitBreaker.REQUEST) + .addEstimateBytesAndMaybeBreak(DEFAULT_CHUNK_SIZE * NUMBER_OF_STREAMS, "model importer"); + var breakerFreeingListener = ActionListener.runAfter( + finalListener, + () -> breakerService.getBreaker(CircuitBreaker.REQUEST).addWithoutBreaking(-(DEFAULT_CHUNK_SIZE * NUMBER_OF_STREAMS)) + ); - ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE); + var ranges = ModelLoaderUtils.split(config.getSize(), NUMBER_OF_STREAMS, DEFAULT_CHUNK_SIZE); + var downloaders = new ArrayList(ranges.size()); + for (var range : ranges) { + downloaders.add(new ModelLoaderUtils.HttpStreamChunker(uri, range, DEFAULT_CHUNK_SIZE)); + } + downloadModelDefinition(config.getSize(), totalParts, vocabularyParts, downloaders, breakerFreeingListener); + } else { + InputStream modelInputStream = ModelLoaderUtils.getFileInputStream(uri); + ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker( + modelInputStream, + DEFAULT_CHUNK_SIZE + ); + readModelDefinitionFromFile(config.getSize(), totalParts, chunkIterator, vocabularyParts, finalListener); + } + } catch (Exception e) { + finalListener.onFailure(e); + } + } - // simple round up - int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE); + void downloadModelDefinition( + long size, + int totalParts, + @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts, + List downloaders, + ActionListener finalListener + ) { + try (var countingListener = new RefCountingListener(1, ActionListener.wrap(ignore -> executorService.execute(() -> { + var finalDownloader = downloaders.get(downloaders.size() - 1); + downloadFinalPart(size, totalParts, finalDownloader, finalListener.delegateFailureAndWrap((l, r) -> { + checkDownloadComplete(downloaders); + l.onResponse(AcknowledgedResponse.TRUE); + })); + }), finalListener::onFailure))) { + // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the + // download is complete + if (vocabularyParts != null) { + uploadVocabulary(vocabularyParts, countingListener); + } - for (int part = 0; part < totalParts - 1; ++part) { - task.setProgress(totalParts, part); - BytesArray definition = chunkIterator.next(); + // Download all but the final split. 
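+ // Each downloader re-submits itself to the executor until its byte range is exhausted, + // then signals completion by releasing its reference on the counting listener.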
+ // The final split is a single chunk + for (int streamSplit = 0; streamSplit < downloaders.size() - 1; ++streamSplit) { + final var downloader = downloaders.get(streamSplit); + var rangeDownloadedListener = countingListener.acquire(); // acquire to keep the counting listener from closing + executorService.execute( + () -> downloadPartInRange(size, totalParts, downloader, executorService, countingListener, rangeDownloadedListener) + ); + } + } + } - PutTrainedModelDefinitionPartAction.Request modelPartRequest = new PutTrainedModelDefinitionPartAction.Request( - modelId, - definition, - part, - size, - totalParts, - true + private void downloadPartInRange( + long size, + int totalParts, + ModelLoaderUtils.HttpStreamChunker downloadChunker, + ExecutorService executorService, + RefCountingListener countingListener, + ActionListener rangeFullyDownloadedListener + ) { + assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) + : format( + "Model download must execute from [%s] but thread is [%s]", + MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME, + Thread.currentThread().getName() ); - executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest); + if (countingListener.isFailing()) { + rangeFullyDownloadedListener.onResponse(null); // the error has already been reported elsewhere + return; } - // get the last part, this time verify the checksum and size - BytesArray definition = chunkIterator.next(); + try { + throwIfTaskCancelled(); + var bytesAndIndex = downloadChunker.next(); + task.setProgress(totalParts, progressCounter.getAndIncrement()); - if (config.getSha256().equals(chunkIterator.getSha256()) == false) { - String message = format( - "Model sha256 checksums do not match, expected [%s] but got [%s]", - config.getSha256(), - chunkIterator.getSha256() - ); + indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes()); + } catch (Exception e) { + rangeFullyDownloadedListener.onFailure(e); + return; + } - throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); + if (downloadChunker.hasNext()) { + executorService.execute( + () -> downloadPartInRange( + size, + totalParts, + downloadChunker, + executorService, + countingListener, + rangeFullyDownloadedListener + ) + ); + } else { + rangeFullyDownloadedListener.onResponse(null); } + } - if (config.getSize() != chunkIterator.getTotalBytesRead()) { - String message = format( - "Model size does not match, expected [%d] but got [%d]", - config.getSize(), - chunkIterator.getTotalBytesRead() + private void downloadFinalPart( + long size, + int totalParts, + ModelLoaderUtils.HttpStreamChunker downloader, + ActionListener lastPartWrittenListener + ) { + assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) + : format( + "Model download must execute from [%s] but thread is [%s]", + MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME, + Thread.currentThread().getName() ); - throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); + try { + var bytesAndIndex = downloader.next(); + task.setProgress(totalParts, progressCounter.getAndIncrement()); + + indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes()); + lastPartWrittenListener.onResponse(AcknowledgedResponse.TRUE); + } catch (Exception e) { + lastPartWrittenListener.onFailure(e); } + } - PutTrainedModelDefinitionPartAction.Request finalModelPartRequest = new 
PutTrainedModelDefinitionPartAction.Request( - modelId, - definition, - totalParts - 1, - size, - totalParts, - true - ); + void readModelDefinitionFromFile( + long size, + int totalParts, + ModelLoaderUtils.InputStreamChunker chunkIterator, + @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts, + ActionListener finalListener + ) { + try (var countingListener = new RefCountingListener(1, ActionListener.wrap(ignore -> executorService.execute(() -> { + finalListener.onResponse(AcknowledgedResponse.TRUE); + }), finalListener::onFailure))) { + try { + if (vocabularyParts != null) { + uploadVocabulary(vocabularyParts, countingListener); + } - executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, finalModelPartRequest); - logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts)); - } + for (int part = 0; part < totalParts; ++part) { + throwIfTaskCancelled(); + task.setProgress(totalParts, part); + BytesArray definition = chunkIterator.next(); + indexPart(part, totalParts, size, definition); + } + task.setProgress(totalParts, totalParts); - private void uploadVocabulary() throws URISyntaxException { - ModelLoaderUtils.VocabularyParts vocabularyParts = ModelLoaderUtils.loadVocabulary( - ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile()) - ); + checkDownloadComplete(chunkIterator, totalParts); + } catch (Exception e) { + countingListener.acquire().onFailure(e); + } + } + } + private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts, RefCountingListener countingListener) { PutTrainedModelVocabularyAction.Request request = new PutTrainedModelVocabularyAction.Request( modelId, vocabularyParts.vocab(), @@ -136,17 +277,58 @@ private void uploadVocabulary() throws URISyntaxException { true ); - executeRequestIfNotCancelled(PutTrainedModelVocabularyAction.INSTANCE, request); + client.execute(PutTrainedModelVocabularyAction.INSTANCE, request, countingListener.acquire(r -> { + logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); + })); } - private void executeRequestIfNotCancelled( - ActionType action, - Request request - ) { - if (task.isCancelled()) { - throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); + private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray bytes) { + PutTrainedModelDefinitionPartAction.Request modelPartRequest = new PutTrainedModelDefinitionPartAction.Request( + modelId, + bytes, + partIndex, + totalSize, + totalParts, + true + ); + + client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest).actionGet(); + } + + private void checkDownloadComplete(List downloaders) { + long totalBytesRead = downloaders.stream().mapToLong(ModelLoaderUtils.HttpStreamChunker::getTotalBytesRead).sum(); + int totalParts = downloaders.stream().mapToInt(ModelLoaderUtils.HttpStreamChunker::getCurrentPart).sum(); + checkSize(totalBytesRead); + logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts)); + } + + private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker fileInputStream, int totalParts) { + checkSha256(fileInputStream.getSha256()); + checkSize(fileInputStream.getTotalBytesRead()); + logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts)); + } + + private void checkSha256(String sha256) { + if (config.getSha256().equals(sha256) == false) { + 
String message = format("Model sha256 checksums do not match, expected [%s] but got [%s]", config.getSha256(), sha256); + + throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); } + } - client.execute(action, request).actionGet(); + private void checkSize(long definitionSize) { + if (config.getSize() != definitionSize) { + String message = format("Model size does not match, expected [%d] but got [%d]", config.getSize(), definitionSize); + throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); + } + } + + private void throwIfTaskCancelled() { + if (task.isCancelled()) { + logger.info("Model [{}] download task cancelled", modelId); + throw new TaskCancelledException( + format("Model [%s] download task cancelled with reason [%s]", modelId, task.getReasonCancelled()) + ); + } } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java index 2f3f9cbf3f32c..e92aff74be463 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentParser; @@ -34,16 +35,20 @@ import java.security.AccessController; import java.security.MessageDigest; import java.security.PrivilegedAction; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import static java.net.HttpURLConnection.HTTP_MOVED_PERM; import static java.net.HttpURLConnection.HTTP_MOVED_TEMP; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.net.HttpURLConnection.HTTP_OK; +import static java.net.HttpURLConnection.HTTP_PARTIAL; import static java.net.HttpURLConnection.HTTP_SEE_OTHER; /** @@ -61,6 +66,75 @@ final class ModelLoaderUtils { record VocabularyParts(List vocab, List merges, List scores) {} + // Range in bytes + record RequestRange(long rangeStart, long rangeEnd, int startPart, int numParts) { + public String bytesRange() { + return "bytes=" + rangeStart + "-" + rangeEnd; + } + } + + static class HttpStreamChunker { + + record BytesAndPartIndex(BytesArray bytes, int partIndex) {} + + private final InputStream inputStream; + private final int chunkSize; + private final AtomicLong totalBytesRead = new AtomicLong(); + private final AtomicInteger currentPart; + private final int lastPartNumber; + private final byte[] buf; + + HttpStreamChunker(URI uri, RequestRange range, int chunkSize) { + var inputStream = getHttpOrHttpsInputStream(uri, range); + this.inputStream = inputStream; + this.chunkSize = chunkSize; + this.lastPartNumber = range.startPart() + range.numParts(); + this.currentPart = new AtomicInteger(range.startPart()); + this.buf = new byte[chunkSize]; + } + + // This ctor exists for testing purposes only. 
+ HttpStreamChunker(InputStream inputStream, RequestRange range, int chunkSize) {
+ this.inputStream = inputStream;
+ this.chunkSize = chunkSize;
+ this.lastPartNumber = range.startPart() + range.numParts();
+ this.currentPart = new AtomicInteger(range.startPart());
+ this.buf = new byte[chunkSize];
+ }
+
+ public boolean hasNext() {
+ return currentPart.get() < lastPartNumber;
+ }
+
+ public BytesAndPartIndex next() throws IOException {
+ int bytesRead = 0;
+
+ while (bytesRead < chunkSize) {
+ int read = inputStream.read(buf, bytesRead, chunkSize - bytesRead);
+ // end of stream reached
+ if (read == -1) {
+ break;
+ }
+ bytesRead += read;
+ }
+
+ if (bytesRead > 0) {
+ totalBytesRead.addAndGet(bytesRead);
+ return new BytesAndPartIndex(new BytesArray(buf, 0, bytesRead), currentPart.getAndIncrement());
+ } else {
+ return new BytesAndPartIndex(BytesArray.EMPTY, currentPart.get());
+ }
+ }
+
+ public long getTotalBytesRead() {
+ return totalBytesRead.get();
+ }
+
+ public int getCurrentPart() {
+ return currentPart.get();
+ }
+ }
+
static class InputStreamChunker {

private final InputStream inputStream;
@@ -101,14 +175,14 @@ public int getTotalBytesRead() {
}
}

- static InputStream getInputStreamFromModelRepository(URI uri) throws IOException {
+ static InputStream getInputStreamFromModelRepository(URI uri) {
String scheme = uri.getScheme().toLowerCase(Locale.ROOT);

// if you add a scheme here, also add it to the bootstrap check in {@link MachineLearningPackageLoader#validateModelRepository}
switch (scheme) {
case "http":
case "https":
- return getHttpOrHttpsInputStream(uri);
+ return getHttpOrHttpsInputStream(uri, null);
case "file":
return getFileInputStream(uri);
default:
@@ -116,6 +190,11 @@ static InputStream getInputStreamFromModelRepository(URI uri) throws IOException
}
}

+ static boolean uriIsFile(URI uri) {
+ String scheme = uri.getScheme().toLowerCase(Locale.ROOT);
+ return "file".equals(scheme);
+ }
+
static VocabularyParts loadVocabulary(URI uri) {
if (uri.getPath().endsWith(".json")) {
try (InputStream vocabInputStream = getInputStreamFromModelRepository(uri)) {
@@ -174,7 +253,7 @@ private ModelLoaderUtils() {}

@SuppressWarnings("'java.lang.SecurityManager' is deprecated and marked for removal ")
@SuppressForbidden(reason = "we need socket connection to download")
- private static InputStream getHttpOrHttpsInputStream(URI uri) throws IOException {
+ private static InputStream getHttpOrHttpsInputStream(URI uri, @Nullable RequestRange range) {

assert uri.getUserInfo() == null : "URI's with credentials are not supported";
@@ -186,18 +265,30 @@ private static InputStream getHttpOrHttpsInputStream(URI uri) throws IOException
PrivilegedAction<InputStream> privilegedHttpReader = () -> {
try {
HttpURLConnection conn = (HttpURLConnection) uri.toURL().openConnection();
+ if (range != null) {
+ conn.setRequestProperty("Range", range.bytesRange());
+ }
switch (conn.getResponseCode()) {
case HTTP_OK:
+ case HTTP_PARTIAL:
return conn.getInputStream();
+ case HTTP_MOVED_PERM:
case HTTP_MOVED_TEMP:
case HTTP_SEE_OTHER:
throw new IllegalStateException("redirects aren't supported yet");
case HTTP_NOT_FOUND:
throw new ResourceNotFoundException("{} not found", uri);
+ case 416: // 416 Range Not Satisfiable has no constant in HttpURLConnection
+ throw new IllegalStateException("Invalid request range [" + range.bytesRange() + "]");
default:
int responseCode = conn.getResponseCode();
- throw new ElasticsearchStatusException("error during downloading {}", RestStatus.fromCode(responseCode), uri);
+ throw new ElasticsearchStatusException(
+ "error downloading {}, got response code {}",
+ RestStatus.fromCode(responseCode),
+ uri,
+ responseCode
+ );
}
} catch (IOException e) {
throw new UncheckedIOException(e);
@@ -209,7 +300,7 @@ private static InputStream getHttpOrHttpsInputStream(URI uri) throws IOException

@SuppressWarnings("'java.lang.SecurityManager' is deprecated and marked for removal ")
@SuppressForbidden(reason = "we need load model data from a file")
- private static InputStream getFileInputStream(URI uri) {
+ static InputStream getFileInputStream(URI uri) {

SecurityManager sm = System.getSecurityManager();
if (sm != null) {
@@ -232,4 +323,53 @@ private static InputStream getFileInputStream(URI uri) {

return AccessController.doPrivileged(privilegedFileReader);
}
+
+ /**
+ * Split a stream of size {@code sizeInBytes} into {@code numberOfStreams} +1
+ * ranges aligned on {@code chunkSizeBytes} boundaries. Each range contains a
+ * whole number of chunks.
+ * The first {@code numberOfStreams} ranges will be split evenly (in terms of
+ * the number of chunks, not the byte size); the final range
+ * is for the single final chunk and will be no more than {@code chunkSizeBytes}
+ * in size. The separate range for the final chunk is because when streaming and
+ * uploading a large model definition, writing the last part has to be handled
+ * as a special case.
+ * @param sizeInBytes The total size of the stream
+ * @param numberOfStreams Divide the bulk of the size into this many streams.
+ * @param chunkSizeBytes The size of each chunk
+ * @return List of {@code numberOfStreams} + 1 ranges.
+ */
+ static List<RequestRange> split(long sizeInBytes, int numberOfStreams, long chunkSizeBytes) {
+ int numberOfChunks = (int) ((sizeInBytes + chunkSizeBytes - 1) / chunkSizeBytes);
+
+ var ranges = new ArrayList<RequestRange>();
+
+ int baseChunksPerStream = numberOfChunks / numberOfStreams;
+ int remainder = numberOfChunks % numberOfStreams;
+ long startOffset = 0;
+ int startChunkIndex = 0;
+
+ for (int i = 0; i < numberOfStreams - 1; i++) {
+ int numChunksInStream = (i < remainder) ?
baseChunksPerStream + 1 : baseChunksPerStream; + long rangeEnd = startOffset + (numChunksInStream * chunkSizeBytes) - 1; // range index is 0 based + ranges.add(new RequestRange(startOffset, rangeEnd, startChunkIndex, numChunksInStream)); + startOffset = rangeEnd + 1; // range is inclusive start and end + startChunkIndex += numChunksInStream; + } + + // Want the final range request to be a single chunk + if (baseChunksPerStream > 1) { + int numChunksExcludingFinal = baseChunksPerStream - 1; + long rangeEnd = startOffset + (numChunksExcludingFinal * chunkSizeBytes) - 1; + ranges.add(new RequestRange(startOffset, rangeEnd, startChunkIndex, numChunksExcludingFinal)); + + startOffset = rangeEnd + 1; + startChunkIndex += numChunksExcludingFinal; + } + + // The final range is a single chunk the end of which should not exceed sizeInBytes + long rangeEnd = Math.min(sizeInBytes, startOffset + (baseChunksPerStream * chunkSizeBytes)) - 1; + ranges.add(new RequestRange(startOffset, rangeEnd, startChunkIndex, 1)); + + return ranges; + } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java index ba50f2f6a6b74..68f869742d9e5 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java @@ -77,7 +77,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A String packagedModelId = request.getPackagedModelId(); logger.debug(() -> format("Fetch package manifest for [%s] from [%s]", packagedModelId, repository)); - threadPool.executor(MachineLearningPackageLoader.UTILITY_THREAD_POOL_NAME).execute(() -> { + threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME).execute(() -> { try { URI uri = ModelLoaderUtils.resolvePackageLocation(repository, packagedModelId + ModelLoaderUtils.METADATA_FILE_EXTENSION); InputStream inputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri); diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java index 70dcee165d3f6..76b7781b1cffe 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -37,14 +38,12 @@ import org.elasticsearch.xpack.core.ml.action.NodeAcknowledgedResponse; import org.elasticsearch.xpack.core.ml.packageloader.action.LoadTrainedModelPackageAction; import 
org.elasticsearch.xpack.core.ml.packageloader.action.LoadTrainedModelPackageAction.Request; -import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader; import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -57,6 +56,7 @@ public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction< private static final Logger logger = LogManager.getLogger(TransportLoadTrainedModelPackage.class); private final Client client; + private final CircuitBreakerService circuitBreakerService; @Inject public TransportLoadTrainedModelPackage( @@ -65,7 +65,8 @@ public TransportLoadTrainedModelPackage( ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client + Client client, + CircuitBreakerService circuitBreakerService ) { super( LoadTrainedModelPackageAction.NAME, @@ -79,6 +80,7 @@ public TransportLoadTrainedModelPackage( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.client = new OriginSettingClient(client, ML_ORIGIN); + this.circuitBreakerService = circuitBreakerService; } @Override @@ -98,11 +100,14 @@ protected void masterOperation(Task task, Request request, ClusterState state, A parentTaskAssigningClient, request.getModelId(), request.getModelPackageConfig(), - downloadTask + downloadTask, + threadPool, + circuitBreakerService ); - threadPool.executor(MachineLearningPackageLoader.UTILITY_THREAD_POOL_NAME) - .execute(() -> importModel(client, taskManager, request, modelImporter, listener, downloadTask)); + var downloadCompleteListener = request.isWaitForCompletion() ? 
listener : ActionListener.noop(); + + importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask); } catch (Exception e) { taskManager.unregister(downloadTask); listener.onFailure(e); @@ -136,16 +141,12 @@ static void importModel( ActionListener listener, Task task ) { - String modelId = request.getModelId(); - final AtomicReference exceptionRef = new AtomicReference<>(); - - try { - final long relativeStartNanos = System.nanoTime(); + final String modelId = request.getModelId(); + final long relativeStartNanos = System.nanoTime(); - logAndWriteNotificationAtLevel(auditClient, modelId, "starting model import", Level.INFO); - - modelImporter.doImport(); + logAndWriteNotificationAtLevel(auditClient, modelId, "starting model import", Level.INFO); + var finishListener = ActionListener.wrap(success -> { final long totalRuntimeNanos = System.nanoTime() - relativeStartNanos; logAndWriteNotificationAtLevel( auditClient, @@ -153,29 +154,25 @@ static void importModel( format("finished model import after [%d] seconds", TimeUnit.NANOSECONDS.toSeconds(totalRuntimeNanos)), Level.INFO ); - } catch (TaskCancelledException e) { - recordError(auditClient, modelId, exceptionRef, e, Level.WARNING); - } catch (ElasticsearchException e) { - recordError(auditClient, modelId, exceptionRef, e, Level.ERROR); - } catch (MalformedURLException e) { - recordError(auditClient, modelId, "an invalid URL", exceptionRef, e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR); - } catch (URISyntaxException e) { - recordError(auditClient, modelId, "an invalid URL syntax", exceptionRef, e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR); - } catch (IOException e) { - recordError(auditClient, modelId, "an IOException", exceptionRef, e, Level.ERROR, RestStatus.SERVICE_UNAVAILABLE); - } catch (Exception e) { - recordError(auditClient, modelId, "an Exception", exceptionRef, e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR); - } finally { - taskManager.unregister(task); - - if (request.isWaitForCompletion()) { - if (exceptionRef.get() != null) { - listener.onFailure(exceptionRef.get()); - } else { - listener.onResponse(AcknowledgedResponse.TRUE); - } + listener.onResponse(AcknowledgedResponse.TRUE); + }, exception -> listener.onFailure(processException(auditClient, modelId, exception))); + + modelImporter.doImport(ActionListener.runAfter(finishListener, () -> taskManager.unregister(task))); + } - } + static Exception processException(Client auditClient, String modelId, Exception e) { + if (e instanceof TaskCancelledException te) { + return recordError(auditClient, modelId, te, Level.WARNING); + } else if (e instanceof ElasticsearchException es) { + return recordError(auditClient, modelId, es, Level.ERROR); + } else if (e instanceof MalformedURLException) { + return recordError(auditClient, modelId, "an invalid URL", e, Level.ERROR, RestStatus.BAD_REQUEST); + } else if (e instanceof URISyntaxException) { + return recordError(auditClient, modelId, "an invalid URL syntax", e, Level.ERROR, RestStatus.BAD_REQUEST); + } else if (e instanceof IOException) { + return recordError(auditClient, modelId, "an IOException", e, Level.ERROR, RestStatus.SERVICE_UNAVAILABLE); + } else { + return recordError(auditClient, modelId, "an Exception", e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR); } } @@ -213,30 +210,16 @@ public ModelDownloadTask createTask(long id, String type, String action, TaskId } } - private static void recordError( - Client client, - String modelId, - AtomicReference exceptionRef, - 
ElasticsearchException e,
- Level level
- ) {
+ private static Exception recordError(Client client, String modelId, ElasticsearchException e, Level level) {
String message = format("Model importing failed due to [%s]", e.getDetailedMessage());
logAndWriteNotificationAtLevel(client, modelId, message, level);
- exceptionRef.set(e);
+ return e;
}

- private static void recordError(
- Client client,
- String modelId,
- String failureType,
- AtomicReference<Exception> exceptionRef,
- Exception e,
- Level level,
- RestStatus status
- ) {
+ private static Exception recordError(Client client, String modelId, String failureType, Exception e, Level level, RestStatus status) {
String message = format("Model importing failed due to %s [%s]", failureType, e);
logAndWriteNotificationAtLevel(client, modelId, message, level);
- exceptionRef.set(new ElasticsearchStatusException(message, status, e));
+ return new ElasticsearchStatusException(message, status, e);
}

private static void logAndWriteNotificationAtLevel(Client client, String modelId, String message, Level level) {
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java
index 967d1b4ba4b6a..2e487b6a9624c 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java
@@ -7,9 +7,13 @@
package org.elasticsearch.xpack.ml.packageloader;

+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.test.ESTestCase;

+import java.util.List;
+
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.oneOf;

@@ -80,4 +84,12 @@ public void testValidateModelRepository() {

assertEquals("xpack.ml.model_repository does not support authentication", e.getMessage());
}
+
+ public void testThreadPoolSize() {
+ var fixedThreadPool = MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY);
+ List<Setting<?>> settings = fixedThreadPool.getRegisteredSettings();
+ var sizeSetting = settings.stream().filter(s -> s.getKey().startsWith("xpack.ml.model_download_thread_pool")).findFirst();
+ assertTrue(sizeSetting.isPresent());
+ assertEquals(5, sizeSetting.get().get(Settings.EMPTY));
+ }
}
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java
index 0afd08c70cf45..3a682fb6a5094 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java
@@ -20,14 +20,7 @@ public class ModelDownloadTaskTests extends ESTestCase {

public void testStatus() {
- var task = new ModelDownloadTask(
- 0L,
- MODEL_IMPORT_TASK_TYPE,
- MODEL_IMPORT_TASK_ACTION,
- downloadModelTaskDescription("foo"),
- TaskId.EMPTY_TASK_ID,
- Map.of()
- );
+ var task = testTask();

task.setProgress(100, 0);
var taskInfo = task.taskInfo("node", true);
@@ -39,4 +32,15 @@ public void
testStatus() { status = Strings.toString(taskInfo.status()); assertThat(status, containsString("{\"total_parts\":100,\"downloaded_parts\":1}")); } + + public static ModelDownloadTask testTask() { + return new ModelDownloadTask( + 0L, + MODEL_IMPORT_TASK_TYPE, + MODEL_IMPORT_TASK_ACTION, + downloadModelTaskDescription("foo"), + TaskId.EMPTY_TASK_ID, + Map.of() + ); + } } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java new file mode 100644 index 0000000000000..cbcf74e69f588 --- /dev/null +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -0,0 +1,334 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.packageloader.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.hash.MessageDigests; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; +import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader; +import org.junit.After; +import org.junit.Before; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.containsString; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class ModelImporterTests extends ESTestCase { + + private TestThreadPool threadPool; + + @Before + public void createThreadPool() { + threadPool = createThreadPool(MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY)); + } + + @After + public void closeThreadPool() { + threadPool.close(); + } + + public void testDownloadModelDefinition() throws InterruptedException, URISyntaxException { + var client = mockClient(false); + var task = ModelDownloadTaskTests.testTask(); + var config = mockConfigWithRepoLinks(); + var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), 
List.of(), List.of()); + var cbs = mock(CircuitBreakerService.class); + when(cbs.getBreaker(eq(CircuitBreaker.REQUEST))).thenReturn(mock(CircuitBreaker.class)); + + int totalParts = 5; + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 2); + + var digest = computeDigest(modelDef); + when(config.getSha256()).thenReturn(digest); + when(config.getSize()).thenReturn(size); + + var importer = new ModelImporter(client, "foo", config, task, threadPool, cbs); + + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener(ActionTestUtils.assertNoFailureListener(ignore -> {}), latch); + importer.downloadModelDefinition(size, totalParts, vocab, streamers, latchedListener); + + latch.await(); + verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any()); + assertEquals(totalParts - 1, task.getStatus().downloadProgress().downloadedParts()); + assertEquals(totalParts, task.getStatus().downloadProgress().totalParts()); + } + + public void testReadModelDefinitionFromFile() throws InterruptedException, URISyntaxException { + var client = mockClient(false); + var task = ModelDownloadTaskTests.testTask(); + var config = mockConfigWithRepoLinks(); + var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of()); + var cbs = mock(CircuitBreakerService.class); + when(cbs.getBreaker(eq(CircuitBreaker.REQUEST))).thenReturn(mock(CircuitBreaker.class)); + + int totalParts = 3; + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + + var digest = computeDigest(modelDef); + when(config.getSha256()).thenReturn(digest); + when(config.getSize()).thenReturn(size); + + var importer = new ModelImporter(client, "foo", config, task, threadPool, cbs); + var streamChunker = new ModelLoaderUtils.InputStreamChunker(new ByteArrayInputStream(modelDef), chunkSize); + + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener(ActionTestUtils.assertNoFailureListener(ignore -> {}), latch); + importer.readModelDefinitionFromFile(size, totalParts, streamChunker, vocab, latchedListener); + + latch.await(); + verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any()); + assertEquals(totalParts, task.getStatus().downloadProgress().downloadedParts()); + assertEquals(totalParts, task.getStatus().downloadProgress().totalParts()); + } + + public void testSizeMismatch() throws InterruptedException, URISyntaxException { + var client = mockClient(false); + var task = mock(ModelDownloadTask.class); + var config = mockConfigWithRepoLinks(); + var cbs = mock(CircuitBreakerService.class); + when(cbs.getBreaker(eq(CircuitBreaker.REQUEST))).thenReturn(mock(CircuitBreaker.class)); + + int totalParts = 5; + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 2); + + var digest = computeDigest(modelDef); + when(config.getSha256()).thenReturn(digest); + when(config.getSize()).thenReturn(size - 1); // expected size and read size are different + + var exceptionHolder = new AtomicReference(); + + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + + var importer = new 
ModelImporter(client, "foo", config, task, threadPool, cbs); + importer.downloadModelDefinition(size, totalParts, null, streamers, latchedListener); + + latch.await(); + assertThat(exceptionHolder.get().getMessage(), containsString("Model size does not match")); + verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any()); + } + + public void testDigestMismatch() throws InterruptedException, URISyntaxException { + var client = mockClient(false); + var task = mock(ModelDownloadTask.class); + var config = mockConfigWithRepoLinks(); + var cbs = mock(CircuitBreakerService.class); + when(cbs.getBreaker(eq(CircuitBreaker.REQUEST))).thenReturn(mock(CircuitBreaker.class)); + + int totalParts = 5; + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 2); + + when(config.getSha256()).thenReturn("0x"); // digest is different + when(config.getSize()).thenReturn(size); + + var exceptionHolder = new AtomicReference(); + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + + var importer = new ModelImporter(client, "foo", config, task, threadPool, cbs); + // Message digest can only be calculated for the file reader + var streamChunker = new ModelLoaderUtils.InputStreamChunker(new ByteArrayInputStream(modelDef), chunkSize); + importer.readModelDefinitionFromFile(size, totalParts, streamChunker, null, latchedListener); + + latch.await(); + assertThat(exceptionHolder.get().getMessage(), containsString("Model sha256 checksums do not match")); + verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any()); + } + + public void testPutFailure() throws InterruptedException, URISyntaxException { + var client = mockClient(true); // client will fail put + var task = mock(ModelDownloadTask.class); + var config = mockConfigWithRepoLinks(); + var cbs = mock(CircuitBreakerService.class); + when(cbs.getBreaker(eq(CircuitBreaker.REQUEST))).thenReturn(mock(CircuitBreaker.class)); + + int totalParts = 4; + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 1); + + var exceptionHolder = new AtomicReference(); + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + + var importer = new ModelImporter(client, "foo", config, task, threadPool, cbs); + importer.downloadModelDefinition(size, totalParts, null, streamers, latchedListener); + + latch.await(); + assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed")); + verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any()); + } + + public void testReadFailure() throws IOException, InterruptedException, URISyntaxException { + var client = mockClient(true); + var task = mock(ModelDownloadTask.class); + var config = mockConfigWithRepoLinks(); + var cbs = mock(CircuitBreakerService.class); + when(cbs.getBreaker(eq(CircuitBreaker.REQUEST))).thenReturn(mock(CircuitBreaker.class)); + + int totalParts = 4; + int chunkSize = 10; + long size = totalParts * chunkSize; + + var streamer = mock(ModelLoaderUtils.HttpStreamChunker.class); + when(streamer.hasNext()).thenReturn(true); + 
when(streamer.next()).thenThrow(new IOException("stream failed")); // fail the read + + var exceptionHolder = new AtomicReference(); + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + + var importer = new ModelImporter(client, "foo", config, task, threadPool, cbs); + importer.downloadModelDefinition(size, totalParts, null, List.of(streamer), latchedListener); + + latch.await(); + assertThat(exceptionHolder.get().getMessage(), containsString("stream failed")); + } + + @SuppressWarnings("unchecked") + public void testUploadVocabFailure() throws InterruptedException, URISyntaxException { + var client = mock(Client.class); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(new ElasticsearchStatusException("put vocab failed", RestStatus.BAD_REQUEST)); + return null; + }).when(client).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any()); + var cbs = mock(CircuitBreakerService.class); + when(cbs.getBreaker(eq(CircuitBreaker.REQUEST))).thenReturn(mock(CircuitBreaker.class)); + + var task = mock(ModelDownloadTask.class); + var config = mockConfigWithRepoLinks(); + + var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of()); + + var exceptionHolder = new AtomicReference(); + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + + var importer = new ModelImporter(client, "foo", config, task, threadPool, cbs); + importer.downloadModelDefinition(100, 5, vocab, List.of(), latchedListener); + + latch.await(); + assertThat(exceptionHolder.get().getMessage(), containsString("put vocab failed")); + verify(client, times(1)).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any()); + verify(client, never()).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any()); + } + + private List mockHttpStreamChunkers(byte[] modelDef, int chunkSize, int numStreams) { + var ranges = ModelLoaderUtils.split(modelDef.length, numStreams, chunkSize); + + var result = new ArrayList(ranges.size()); + for (var range : ranges) { + int len = range.numParts() * chunkSize; + var modelDefStream = new ByteArrayInputStream(modelDef, (int) range.rangeStart(), len); + result.add(new ModelLoaderUtils.HttpStreamChunker(modelDefStream, range, chunkSize)); + } + + return result; + } + + private byte[] modelDefinition(int totalParts, int chunkSize) { + var bytes = new byte[totalParts * chunkSize]; + for (int i = 0; i < totalParts; i++) { + System.arraycopy(randomByteArrayOfLength(chunkSize), 0, bytes, i * chunkSize, chunkSize); + } + return bytes; + } + + private String computeDigest(byte[] modelDef) { + var digest = MessageDigests.sha256(); + digest.update(modelDef); + return MessageDigests.toHexString(digest.digest()); + } + + @SuppressWarnings("unchecked") + private Client mockClient(boolean failPutPart) { + var client = mock(Client.class); + + if (failPutPart) { + when(client.execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any())).thenThrow( + new IllegalStateException("put model part failed") + ); + } else { + ActionFuture future = mock(ActionFuture.class); + when(future.actionGet()).thenReturn(AcknowledgedResponse.TRUE); + when(client.execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any())).thenReturn(future); + } + + doAnswer(invocation -> { + ActionListener 
listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(AcknowledgedResponse.TRUE); + return null; + }).when(client).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any()); + + return client; + } + + private ModelPackageConfig mockConfigWithRepoLinks() { + var config = mock(ModelPackageConfig.class); + when(config.getModelRepository()).thenReturn("https://models.models"); + when(config.getPackagedModelId()).thenReturn("my-model"); + return config; + } +} diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java index 661cd12f99957..f421a7b44e7f1 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java @@ -17,6 +17,7 @@ import java.nio.charset.StandardCharsets; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; public class ModelLoaderUtilsTests extends ESTestCase { @@ -80,14 +81,13 @@ public void testSha256AndSize() throws IOException { assertEquals(64, expectedDigest.length()); int chunkSize = randomIntBetween(100, 10_000); + int totalParts = (bytes.length + chunkSize - 1) / chunkSize; ModelLoaderUtils.InputStreamChunker inputStreamChunker = new ModelLoaderUtils.InputStreamChunker( new ByteArrayInputStream(bytes), chunkSize ); - int totalParts = (bytes.length + chunkSize - 1) / chunkSize; - for (int part = 0; part < totalParts - 1; ++part) { assertEquals(chunkSize, inputStreamChunker.next().length()); } @@ -112,4 +112,40 @@ public void testParseVocabulary() throws IOException { assertThat(parsedVocab.merges(), contains("mergefoo", "mergebar", "mergebaz")); assertThat(parsedVocab.scores(), contains(1.0, 2.0, 3.0)); } + + public void testSplitIntoRanges() { + long totalSize = randomLongBetween(10_000, 50_000_000); + int numStreams = randomIntBetween(1, 10); + int chunkSize = 1024; + var ranges = ModelLoaderUtils.split(totalSize, numStreams, chunkSize); + assertThat(ranges, hasSize(numStreams + 1)); + + int expectedNumChunks = (int) ((totalSize + chunkSize - 1) / chunkSize); + assertThat(ranges.stream().mapToInt(ModelLoaderUtils.RequestRange::numParts).sum(), is(expectedNumChunks)); + + long startBytes = 0; + int startPartIndex = 0; + for (int i = 0; i < ranges.size() - 1; i++) { + assertThat(ranges.get(i).rangeStart(), is(startBytes)); + long end = startBytes + ((long) ranges.get(i).numParts() * chunkSize) - 1; + assertThat(ranges.get(i).rangeEnd(), is(end)); + long expectedNumBytesInRange = (long) chunkSize * ranges.get(i).numParts() - 1; + assertThat(ranges.get(i).rangeEnd() - ranges.get(i).rangeStart(), is(expectedNumBytesInRange)); + assertThat(ranges.get(i).startPart(), is(startPartIndex)); + + startBytes = end + 1; + startPartIndex += ranges.get(i).numParts(); + } + + var finalRange = ranges.get(ranges.size() - 1); + assertThat(finalRange.rangeStart(), is(startBytes)); + assertThat(finalRange.rangeEnd(), is(totalSize - 1)); + assertThat(finalRange.numParts(), is(1)); + } + + public void testRangeRequestBytesRange() { + long start = randomLongBetween(0, 2 << 10); + long end = randomLongBetween(start + 1, 2 << 11); + assertEquals("bytes=" + start + "-" + end, new 
ModelLoaderUtils.RequestRange(start, end, 0, 1).bytesRange()); + } } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java index a3f59e13f2f5b..cbcfd5b760779 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java @@ -33,7 +33,7 @@ import static org.hamcrest.core.Is.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -42,7 +42,7 @@ public class TransportLoadTrainedModelPackageTests extends ESTestCase { private static final String MODEL_IMPORT_FAILURE_MSG_FORMAT = "Model importing failed due to %s [%s]"; public void testSendsFinishedUploadNotification() { - var uploader = mock(ModelImporter.class); + var uploader = createUploader(null); var taskManager = mock(TaskManager.class); var task = mock(Task.class); var client = mock(Client.class); @@ -63,49 +63,49 @@ public void testSendsFinishedUploadNotification() { assertThat(notificationArg.getValue().getMessage(), CoreMatchers.containsString("finished model import after")); } - public void testSendsErrorNotificationForInternalError() throws URISyntaxException, IOException { + public void testSendsErrorNotificationForInternalError() throws Exception { ElasticsearchStatusException exception = new ElasticsearchStatusException("exception", RestStatus.INTERNAL_SERVER_ERROR); String message = format("Model importing failed due to [%s]", exception.toString()); assertUploadCallsOnFailure(exception, message, Level.ERROR); } - public void testSendsErrorNotificationForMalformedURL() throws URISyntaxException, IOException { + public void testSendsErrorNotificationForMalformedURL() throws Exception { MalformedURLException exception = new MalformedURLException("exception"); String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an invalid URL", exception.toString()); - assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR); + assertUploadCallsOnFailure(exception, message, RestStatus.BAD_REQUEST, Level.ERROR); } - public void testSendsErrorNotificationForURISyntax() throws URISyntaxException, IOException { + public void testSendsErrorNotificationForURISyntax() throws Exception { URISyntaxException exception = mock(URISyntaxException.class); String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an invalid URL syntax", exception.toString()); - assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR); + assertUploadCallsOnFailure(exception, message, RestStatus.BAD_REQUEST, Level.ERROR); } - public void testSendsErrorNotificationForIOException() throws URISyntaxException, IOException { + public void testSendsErrorNotificationForIOException() throws Exception { IOException exception = mock(IOException.class); String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an IOException", exception.toString()); assertUploadCallsOnFailure(exception, message, 
RestStatus.SERVICE_UNAVAILABLE, Level.ERROR); } - public void testSendsErrorNotificationForException() throws URISyntaxException, IOException { + public void testSendsErrorNotificationForException() throws Exception { RuntimeException exception = mock(RuntimeException.class); String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an Exception", exception.toString()); assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR); } - public void testSendsWarningNotificationForTaskCancelledException() throws URISyntaxException, IOException { + public void testSendsWarningNotificationForTaskCancelledException() throws Exception { TaskCancelledException exception = new TaskCancelledException("cancelled"); String message = format("Model importing failed due to [%s]", exception.toString()); assertUploadCallsOnFailure(exception, message, Level.WARNING); } - public void testCallsOnResponseWithAcknowledgedResponse() throws URISyntaxException, IOException { + public void testCallsOnResponseWithAcknowledgedResponse() throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); var task = mock(Task.class); @@ -134,15 +134,13 @@ public void testDoesNotCallListenerWhenNotWaitingForCompletion() { ); } - private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws URISyntaxException, - IOException { + private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws Exception { var esStatusException = new ElasticsearchStatusException(message, status, exception); assertNotificationAndOnFailure(exception, esStatusException, message, level); } - private void assertUploadCallsOnFailure(ElasticsearchException exception, String message, Level level) throws URISyntaxException, - IOException { + private void assertUploadCallsOnFailure(ElasticsearchException exception, String message, Level level) throws Exception { assertNotificationAndOnFailure(exception, exception, message, level); } @@ -151,7 +149,7 @@ private void assertNotificationAndOnFailure( ElasticsearchException onFailureException, String message, Level level - ) throws URISyntaxException, IOException { + ) throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); var task = mock(Task.class); @@ -179,11 +177,18 @@ private void assertNotificationAndOnFailure( verify(taskManager).unregister(task); } - private ModelImporter createUploader(Exception exception) throws URISyntaxException, IOException { + @SuppressWarnings("unchecked") + private ModelImporter createUploader(Exception exception) { ModelImporter uploader = mock(ModelImporter.class); - if (exception != null) { - doThrow(exception).when(uploader).doImport(); - } + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[0]; + if (exception != null) { + listener.onFailure(exception); + } else { + listener.onResponse(AcknowledgedResponse.TRUE); + } + return null; + }).when(uploader).doImport(any(ActionListener.class)); return uploader; } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index 3b705e63a145f..d18b6b6cf9ab6 100644 --- 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; @@ -172,7 +173,7 @@ protected Function getClientWrapper() { // user. This is ok for internal n2n stuff but the test framework does other things like wiping indices, repositories, etc // that the system user cannot do. so we wrap the node client with a user that can do these things since the client() calls // return a node client - return client -> client.filterWithHeader(headers); + return client -> asInstanceOf(NodeClient.class, client).filterWithHeader(headers); } private Settings externalClusterClientSettings() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 6c59add730052..bbe90f769818b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -259,12 +259,17 @@ public synchronized void start() { } public synchronized void stop() { + clusterService.removeListener(this); stopScheduling(); metrics.close(); } @Override public void clusterChanged(ClusterChangedEvent event) { + if (event.metadataChanged() == false) { + return; + } + updateAutoscalers(event.state()); if (scalers.isEmpty() == false) { startScheduling(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java index 624ef5434e2a0..8804d588988b2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java @@ -123,8 +123,8 @@ private static AssignmentPlan mergePlans( nodesByZone.values().forEach(allNodes::addAll); final List allDeployments = new ArrayList<>(); - allDeployments.addAll(planForNormalPriorityModels.models()); - allDeployments.addAll(planForLowPriorityModels.models()); + allDeployments.addAll(planForNormalPriorityModels.deployments()); + allDeployments.addAll(planForLowPriorityModels.deployments()); final Map originalNodeById = allNodes.stream() .collect(Collectors.toMap(AssignmentPlan.Node::id, Function.identity())); @@ -139,7 +139,7 @@ private static void copyAssignments( AssignmentPlan.Builder dest, Map originalNodeById ) { - for (AssignmentPlan.Deployment m : source.models()) { + for (AssignmentPlan.Deployment m : source.deployments()) { Map nodeAssignments = source.assignments(m).orElse(Map.of()); for (Map.Entry assignment : 
nodeAssignments.entrySet()) { AssignmentPlan.Node originalNode = originalNodeById.get(assignment.getKey().id()); @@ -328,14 +328,14 @@ private static long getNodeFreeMemoryExcludingPerNodeOverheadAndNativeInference( private TrainedModelAssignmentMetadata.Builder buildAssignmentsFromPlan(AssignmentPlan assignmentPlan) { TrainedModelAssignmentMetadata.Builder builder = TrainedModelAssignmentMetadata.Builder.empty(); - for (AssignmentPlan.Deployment deployment : assignmentPlan.models()) { - TrainedModelAssignment existingAssignment = currentMetadata.getDeploymentAssignment(deployment.id()); + for (AssignmentPlan.Deployment deployment : assignmentPlan.deployments()) { + TrainedModelAssignment existingAssignment = currentMetadata.getDeploymentAssignment(deployment.deploymentId()); TrainedModelAssignment.Builder assignmentBuilder = existingAssignment == null && createAssignmentRequest.isPresent() ? TrainedModelAssignment.Builder.empty(createAssignmentRequest.get()) : TrainedModelAssignment.Builder.empty( - currentMetadata.getDeploymentAssignment(deployment.id()).getTaskParams(), - currentMetadata.getDeploymentAssignment(deployment.id()).getAdaptiveAllocationsSettings() + currentMetadata.getDeploymentAssignment(deployment.deploymentId()).getTaskParams(), + currentMetadata.getDeploymentAssignment(deployment.deploymentId()).getAdaptiveAllocationsSettings() ); if (existingAssignment != null) { assignmentBuilder.setStartTime(existingAssignment.getStartTime()); @@ -366,7 +366,7 @@ private TrainedModelAssignmentMetadata.Builder buildAssignmentsFromPlan(Assignme assignmentBuilder.calculateAndSetAssignmentState(); explainAssignments(assignmentPlan, nodeLoads, deployment).ifPresent(assignmentBuilder::setReason); - builder.addNewAssignment(deployment.id(), assignmentBuilder); + builder.addNewAssignment(deployment.deploymentId(), assignmentBuilder); } return builder; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java index 0151c8f5ee9c8..66b8d9e570211 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java @@ -54,7 +54,7 @@ Deployment modifyModelPreservingPreviousAssignments(Deployment m) { } return new Deployment( - m.id(), + m.deploymentId(), m.memoryBytes(), m.allocations() - calculatePreservedAllocations(m), m.threadsPerAllocation(), @@ -71,11 +71,14 @@ AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { // they will not match the models/nodes members we have in this class. // Therefore, we build a lookup table based on the ids, so we can merge the plan // with its preserved allocations. 
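// Editor's sketch, illustrative values only and not part of this change, of the (deploymentId, nodeId)-keyed
// lookup described above:
// Map<Tuple<String, String>, Integer> planned = new HashMap<>();
// planned.put(Tuple.tuple("deployment-1", "node-a"), 2); // two allocations planned on node-a
// int onNodeB = planned.getOrDefault(Tuple.tuple("deployment-1", "node-b"), 0); // zero when there is no assignment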
- final Map, Integer> plannedAssignmentsByModelNodeIdPair = new HashMap<>(); - for (Deployment m : assignmentPlan.models()) { - Map assignments = assignmentPlan.assignments(m).orElse(Map.of()); - for (Map.Entry nodeAssignment : assignments.entrySet()) { - plannedAssignmentsByModelNodeIdPair.put(Tuple.tuple(m.id(), nodeAssignment.getKey().id()), nodeAssignment.getValue()); + final Map, Integer> plannedAssignmentsByDeploymentNodeIdPair = new HashMap<>(); + for (Deployment d : assignmentPlan.deployments()) { + Map assignmentsOfDeployment = assignmentPlan.assignments(d).orElse(Map.of()); + for (Map.Entry nodeAssignment : assignmentsOfDeployment.entrySet()) { + plannedAssignmentsByDeploymentNodeIdPair.put( + Tuple.tuple(d.deploymentId(), nodeAssignment.getKey().id()), + nodeAssignment.getValue() + ); } } @@ -93,8 +96,8 @@ AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { } } for (Deployment deploymentNewAllocations : deployments) { - int newAllocations = plannedAssignmentsByModelNodeIdPair.getOrDefault( - Tuple.tuple(deploymentNewAllocations.id(), n.id()), + int newAllocations = plannedAssignmentsByDeploymentNodeIdPair.getOrDefault( + Tuple.tuple(deploymentNewAllocations.deploymentId(), n.id()), 0 ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java index 7fc16394ed85c..c294e7b2de792 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java @@ -31,8 +31,22 @@ */ public class AssignmentPlan implements Comparable { + /** + * A model deployment the plan assigns to nodes, together with its resource requirements. + * @param deploymentId the unique identifier of the model deployment + * @param memoryBytes the memory the model requires, in bytes + * @param allocations the number of allocations requested for the deployment + * @param threadsPerAllocation the number of threads each allocation uses + * @param currentAllocationsByNodeId the allocations the deployment currently has, keyed by node id + * @param maxAssignedAllocations this value is used by the ZoneAwareAssignmentPlanner and AssignmentPlanner to keep track of the + * maximum number of allocations which have been assigned. It is mainly used when assigning across availability zones (AZs).
+ * @param adaptiveAllocationsSettings the adaptive allocations settings of the deployment, if configured + * @param priority the priority of the deployment + * @param perDeploymentMemoryBytes additional memory required by the deployment as a whole, in bytes + * @param perAllocationMemoryBytes additional memory required per allocation, in bytes + */ public record Deployment( - String id, + String deploymentId, long memoryBytes, int allocations, int threadsPerAllocation, @@ -44,7 +58,7 @@ public record Deployment( long perAllocationMemoryBytes ) { public Deployment( - String id, + String deploymentId, long modelBytes, int allocations, int threadsPerAllocation, @@ -55,7 +69,7 @@ public Deployment( long perAllocationMemoryBytes ) { this( - id, + deploymentId, modelBytes, allocations, threadsPerAllocation, @@ -82,7 +96,7 @@ boolean hasEverBeenAllocated() { public long estimateMemoryUsageBytes(int allocations) { return StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - id, + deploymentId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, @@ -92,13 +106,13 @@ public long estimateMemoryUsageBytes(int allocations) { long estimateAdditionalMemoryUsageBytes(int allocationsOld, int allocationsNew) { return StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - id, + deploymentId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, allocationsNew ) - StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - id, + deploymentId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, @@ -109,7 +123,7 @@ long estimateAdditionalMemoryUsageBytes(int allocationsOld, int allocationsNew) long minimumMemoryRequiredBytes() { return StartTrainedModelDeploymentAction.estimateMemoryUsageBytes( - id, + deploymentId, memoryBytes, perDeploymentMemoryBytes, perAllocationMemoryBytes, @@ -136,7 +150,7 @@ int findExcessAllocations(int maxAllocations, long availableMemoryBytes) { @Override public String toString() { - return id + return deploymentId + " (mem = " + ByteSizeValue.ofBytes(memoryBytes) + ") (allocations = " @@ -186,7 +200,7 @@ private AssignmentPlan( this.remainingModelAllocations = Objects.requireNonNull(remainingModelAllocations); } - public Set models() { + public Set deployments() { return assignments.keySet(); } @@ -208,7 +222,7 @@ public int compareTo(AssignmentPlan o) { } public boolean satisfiesCurrentAssignments() { - return models().stream().allMatch(this::isSatisfyingCurrentAssignmentsForModel); + return deployments().stream().allMatch(this::isSatisfyingCurrentAssignmentsForModel); } private boolean isSatisfyingCurrentAssignmentsForModel(Deployment m) { @@ -225,18 +239,18 @@ public boolean satisfiesAllocations(Deployment m) { } public boolean satisfiesAllModels() { - return models().stream().allMatch(this::satisfiesAllocations); + return deployments().stream().allMatch(this::satisfiesAllocations); } public boolean arePreviouslyAssignedModelsAssigned() { - return models().stream() + return deployments().stream() .filter(Deployment::hasEverBeenAllocated) .map(this::totalAllocations) .allMatch(totalAllocations -> totalAllocations > 0); } public long countPreviouslyAssignedModelsThatAreStillAssigned() { - return models().stream() + return deployments().stream() .filter(Deployment::hasEverBeenAllocated) .map(this::totalAllocations) .filter(totalAllocations -> totalAllocations > 0) @@ -301,11 +315,11 @@ public String prettyPrint() { msg.append(" ->"); for (Tuple modelAllocations : nodeToModel.get(n) .stream() - .sorted(Comparator.comparing(x -> x.v1().id())) + .sorted(Comparator.comparing(x -> x.v1().deploymentId())) .toList()) { if (modelAllocations.v2() > 0) { msg.append(" "); - msg.append(modelAllocations.v1().id()); +
msg.append(modelAllocations.v1().deploymentId()); msg.append(" (mem = "); msg.append(ByteSizeValue.ofBytes(modelAllocations.v1().memoryBytes())); msg.append(")"); @@ -415,7 +429,7 @@ public Builder assignModelToNode(Deployment deployment, Node node, int allocatio + "] to assign [" + allocations + "] allocations to deployment [" - + deployment.id() + + deployment.deploymentId() + "]" ); } @@ -426,7 +440,7 @@ public Builder assignModelToNode(Deployment deployment, Node node, int allocatio + "] to assign [" + allocations + "] allocations to deployment [" - + deployment.id() + + deployment.deploymentId() + "]; required threads per allocation [" + deployment.threadsPerAllocation() + "]" @@ -464,7 +478,7 @@ public void accountMemory(Deployment m, Node n) { private void accountMemory(Deployment m, Node n, long requiredMemory) { remainingNodeMemory.computeIfPresent(n, (k, v) -> v - requiredMemory); if (remainingNodeMemory.containsKey(n) && remainingNodeMemory.get(n) < 0) { - throw new IllegalArgumentException("not enough memory on node [" + n.id() + "] to assign model [" + m.id() + "]"); + throw new IllegalArgumentException("not enough memory on node [" + n.id() + "] to assign model [" + m.deploymentId() + "]"); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java index 38279a2fd6c03..8b5f33e25e242 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanner.java @@ -50,7 +50,7 @@ public class AssignmentPlanner { public AssignmentPlanner(List nodes, List deployments) { this.nodes = nodes.stream().sorted(Comparator.comparing(Node::id)).toList(); - this.deployments = deployments.stream().sorted(Comparator.comparing(AssignmentPlan.Deployment::id)).toList(); + this.deployments = deployments.stream().sorted(Comparator.comparing(AssignmentPlan.Deployment::deploymentId)).toList(); } public AssignmentPlan computePlan() { @@ -111,7 +111,7 @@ private AssignmentPlan solveAllocatingAtLeastOnceModelsThatWerePreviouslyAllocat .filter(m -> m.hasEverBeenAllocated()) .map( m -> new AssignmentPlan.Deployment( - m.id(), + m.deploymentId(), m.memoryBytes(), 1, m.threadsPerAllocation(), @@ -130,21 +130,21 @@ private AssignmentPlan solveAllocatingAtLeastOnceModelsThatWerePreviouslyAllocat ).solvePlan(true); Map modelIdToNodeIdWithSingleAllocation = new HashMap<>(); - for (AssignmentPlan.Deployment m : planWithSingleAllocationForPreviouslyAssignedModels.models()) { + for (AssignmentPlan.Deployment m : planWithSingleAllocationForPreviouslyAssignedModels.deployments()) { Optional> assignments = planWithSingleAllocationForPreviouslyAssignedModels.assignments(m); Set nodes = assignments.orElse(Map.of()).keySet(); if (nodes.isEmpty() == false) { assert nodes.size() == 1; - modelIdToNodeIdWithSingleAllocation.put(m.id(), nodes.iterator().next().id()); + modelIdToNodeIdWithSingleAllocation.put(m.deploymentId(), nodes.iterator().next().id()); } } List planDeployments = deployments.stream().map(m -> { - Map currentAllocationsByNodeId = modelIdToNodeIdWithSingleAllocation.containsKey(m.id()) - ? Map.of(modelIdToNodeIdWithSingleAllocation.get(m.id()), 1) + Map currentAllocationsByNodeId = modelIdToNodeIdWithSingleAllocation.containsKey(m.deploymentId()) + ? 
Map.of(modelIdToNodeIdWithSingleAllocation.get(m.deploymentId()), 1) : Map.of(); return new AssignmentPlan.Deployment( - m.id(), + m.deploymentId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/LinearProgrammingPlanSolver.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/LinearProgrammingPlanSolver.java index bd97680e285cc..90b3d3590a254 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/LinearProgrammingPlanSolver.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/LinearProgrammingPlanSolver.java @@ -279,24 +279,24 @@ private boolean solveLinearProgram( Map, Variable> allocationVars = new HashMap<>(); - for (AssignmentPlan.Deployment m : deployments) { + for (AssignmentPlan.Deployment d : deployments) { for (Node n : nodes) { - Variable allocationVar = model.addVariable("allocations_of_model_" + m.id() + "_on_node_" + n.id()) + Variable allocationVar = model.addVariable("allocations_of_model_" + d.deploymentId() + "_on_node_" + n.id()) .integer(false) // We relax the program to non-integer as the integer solver is much slower and can often lead to // infeasible solutions .lower(0.0) // It is important not to set an upper bound here as it impacts memory negatively - .weight(weightForAllocationVar(m, n, weights)); - allocationVars.put(Tuple.tuple(m, n), allocationVar); + .weight(weightForAllocationVar(d, n, weights)); + allocationVars.put(Tuple.tuple(d, n), allocationVar); } } - for (Deployment m : deployments) { + for (Deployment d : deployments) { // Each model should not get more allocations than is required. // Also, if the model has previous assignments, it should get at least as many allocations as it did before. 
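The expression added below bounds, for each deployment, the sum of its per-node allocation variables: the total must be at least the allocations the deployment currently has and at most the allocations it requires. A solver-free sketch of that invariant, checked against a finished assignment with simplified stand-in types rather than the actual optimization model:

    import java.util.Map;

    public class AllocationBoundsSketch {
        // The invariant the expression encodes per deployment: total planned allocations
        // must lie in [currentAssignedAllocations, requiredAllocations].
        static boolean withinBounds(int currentAssigned, int required, Map<String, Integer> allocationsByNodeId) {
            int total = allocationsByNodeId.values().stream().mapToInt(Integer::intValue).sum();
            return currentAssigned <= total && total <= required;
        }

        public static void main(String[] args) {
            // A deployment that already has 2 allocations and asks for 4 in total:
            System.out.println(withinBounds(2, 4, Map.of("n_1", 2, "n_2", 1))); // true (3 is in [2, 4])
            System.out.println(withinBounds(2, 4, Map.of("n_1", 1)));           // false (drops below the 2 it had)
        }
    }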
- model.addExpression("allocations_of_model_" + m.id() + "_not_more_than_required") - .lower(m.getCurrentAssignedAllocations()) - .upper(m.allocations()) - .setLinearFactorsSimple(varsForModel(m, allocationVars)); + model.addExpression("allocations_of_model_" + d.deploymentId() + "_not_more_than_required") + .lower(d.getCurrentAssignedAllocations()) + .upper(d.allocations()) + .setLinearFactorsSimple(varsForModel(d, allocationVars)); } double[] threadsPerAllocationPerModel = deployments.stream().mapToDouble(m -> m.threadsPerAllocation()).toArray(); @@ -374,18 +374,18 @@ private String prettyPrintSolverResult( for (int i = 0; i < nodes.size(); i++) { Node n = nodes.get(i); msg.append(n + " ->"); - for (Deployment m : deployments) { - if (threadValues.get(Tuple.tuple(m, n)) > 0) { + for (Deployment d : deployments) { + if (threadValues.get(Tuple.tuple(d, n)) > 0) { msg.append(" "); - msg.append(m.id()); + msg.append(d.deploymentId()); msg.append(" (mem = "); - msg.append(ByteSizeValue.ofBytes(m.memoryBytes())); + msg.append(ByteSizeValue.ofBytes(d.memoryBytes())); msg.append(") (allocations = "); - msg.append(threadValues.get(Tuple.tuple(m, n))); + msg.append(threadValues.get(Tuple.tuple(d, n))); msg.append("/"); - msg.append(m.allocations()); + msg.append(d.allocations()); msg.append(") (y = "); - msg.append(assignmentValues.get(Tuple.tuple(m, n))); + msg.append(assignmentValues.get(Tuple.tuple(d, n))); msg.append(")"); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java index 1f0857391598f..c5b750f91014f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java @@ -80,22 +80,22 @@ private AssignmentPlan computePlan(boolean tryAssigningPreviouslyAssignedModels) // allocated on the first per zone assignment plans. 
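Each zone plan targets the ceiling of a deployment's remaining allocations divided by the number of zones still to be planned; the refactoring below factors the arithmetic into remainingAllocationsPerZoneAfterAssigningOne, so the target per zone is 1 + (remaining - 1) / remainingZones. A small sketch of how the split plays out, assuming for simplicity that every zone places its full target:

    public class ZoneSplitSketch {
        public static void main(String[] args) {
            // Ceiling split of remaining allocations over the zones left to plan,
            // as in computeZonePlan: target = 1 + (remaining - 1) / remainingZones.
            int remaining = 7;
            for (int remainingZones = 3; remainingZones > 0; remainingZones--) {
                int target = remaining > 0 ? 1 + (remaining - 1) / remainingZones : 0;
                System.out.print(target + " ");
                remaining -= target; // assume this zone placed its full target
            }
            System.out.println(); // prints: 3 2 2
        }
    }

In the real planner the deduction uses plan.totalAllocations(d), which may be less than the target if a zone lacks capacity, so later zones pick up whatever is left over.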
int remainingZones = nodesByZone.size(); - Map modelIdToRemainingAllocations = deployments.stream() - .collect(Collectors.toMap(AssignmentPlan.Deployment::id, AssignmentPlan.Deployment::allocations)); + Map deploymentIdToRemainingAllocations = deployments.stream() + .collect(Collectors.toMap(AssignmentPlan.Deployment::deploymentId, AssignmentPlan.Deployment::allocations)); List plans = new ArrayList<>(); for (var zoneToNodes : nodesByZone.entrySet()) { logger.debug(() -> format("computing plan for availability zone %s", zoneToNodes.getKey())); AssignmentPlan plan = computeZonePlan( zoneToNodes.getValue(), - modelIdToRemainingAllocations, + deploymentIdToRemainingAllocations, remainingZones, tryAssigningPreviouslyAssignedModels ); - plan.models() + plan.deployments() .forEach( - m -> modelIdToRemainingAllocations.computeIfPresent( - m.id(), - (modelId, remainingAllocations) -> remainingAllocations - plan.totalAllocations(m) + d -> deploymentIdToRemainingAllocations.computeIfPresent( + d.deploymentId(), + (deploymentId, remainingAllocations) -> remainingAllocations - plan.totalAllocations(d) ) ); plans.add(plan); @@ -108,56 +108,69 @@ private AssignmentPlan computePlan(boolean tryAssigningPreviouslyAssignedModels) private AssignmentPlan computeZonePlan( List nodes, - Map modelIdToRemainingAllocations, + Map deploymentIdToRemainingAllocations, int remainingZones, boolean tryAssigningPreviouslyAssignedModels ) { - Map modelIdToTargetAllocations = modelIdToRemainingAllocations.entrySet() + Map deploymentIdToTargetAllocationsPerZone = deploymentIdToRemainingAllocations.entrySet() .stream() .filter(e -> e.getValue() > 0) - .collect(Collectors.toMap(e -> e.getKey(), e -> (e.getValue() - 1) / remainingZones + 1)); + .collect( + Collectors.toMap(Map.Entry::getKey, e -> 1 + remainingAllocationsPerZoneAfterAssigningOne(remainingZones, e.getValue())) + ); + // Each deployment that still has remaining allocations gets its per-zone share applied in this zone's plan List modifiedDeployments = deployments.stream() - .filter(m -> modelIdToTargetAllocations.getOrDefault(m.id(), 0) > 0) + .filter(d -> deploymentIdToTargetAllocationsPerZone.getOrDefault(d.deploymentId(), 0) > 0) + // filter out deployments with no remaining allocations .map( - m -> new AssignmentPlan.Deployment( - m.id(), - m.memoryBytes(), - modelIdToTargetAllocations.get(m.id()), - m.threadsPerAllocation(), - m.currentAllocationsByNodeId(), - (tryAssigningPreviouslyAssignedModels && modelIdToRemainingAllocations.get(m.id()) == m.allocations()) ? m.maxAssignedAllocations() + d -> new AssignmentPlan.Deployment( + // replace each deployment with a copy scaled to this zone's target allocations + d.deploymentId(), + d.memoryBytes(), + deploymentIdToTargetAllocationsPerZone.get(d.deploymentId()), + d.threadsPerAllocation(), + d.currentAllocationsByNodeId(), + // (below) Only force assigning, at least once, previously assigned deployments that no earlier zone has allocated yet + (tryAssigningPreviouslyAssignedModels && deploymentIdToRemainingAllocations.get(d.deploymentId()) == d.allocations()) + ?
d.maxAssignedAllocations() : 0, - m.getAdaptiveAllocationsSettings(), - // Only force assigning at least once previously assigned models that have not had any allocation yet - m.perDeploymentMemoryBytes(), - m.perAllocationMemoryBytes() + d.getAdaptiveAllocationsSettings(), + d.perDeploymentMemoryBytes(), + d.perAllocationMemoryBytes() ) ) .toList(); return new AssignmentPlanner(nodes, modifiedDeployments).computePlan(tryAssigningPreviouslyAssignedModels); } + private static int remainingAllocationsPerZoneAfterAssigningOne(int remainingZones, Integer remainingAllocations) { + if (remainingAllocations == null || remainingZones == 0) { + // should never happen + return 0; + } + return (remainingAllocations - 1) / remainingZones; + } + private AssignmentPlan computePlanAcrossAllNodes(List plans) { logger.debug(() -> "computing plan across all nodes"); final List allNodes = new ArrayList<>(); nodesByZone.values().forEach(allNodes::addAll); - Map> allocationsByNodeIdByModelId = mergeAllocationsByNodeIdByModelId(plans); + Map> allocationsByNodeIdByDeploymentId = mergeAllocationsByNodeIdByDeploymentId(plans); List modelsAccountingPlans = deployments.stream() .map( - m -> new AssignmentPlan.Deployment( - m.id(), - m.memoryBytes(), - m.allocations(), - m.threadsPerAllocation(), - allocationsByNodeIdByModelId.get(m.id()), - m.maxAssignedAllocations(), - m.getAdaptiveAllocationsSettings(), - m.perDeploymentMemoryBytes(), - m.perAllocationMemoryBytes() + d -> new AssignmentPlan.Deployment( + d.deploymentId(), + d.memoryBytes(), + d.allocations(), + d.threadsPerAllocation(), + allocationsByNodeIdByDeploymentId.get(d.deploymentId()), + d.maxAssignedAllocations(), + d.getAdaptiveAllocationsSettings(), + d.perDeploymentMemoryBytes(), + d.perAllocationMemoryBytes() ) ) .toList(); @@ -176,11 +189,11 @@ private AssignmentPlan swapOriginalModelsInPlan( List planDeployments ) { final Map originalModelById = deployments.stream() - .collect(Collectors.toMap(AssignmentPlan.Deployment::id, Function.identity())); + .collect(Collectors.toMap(AssignmentPlan.Deployment::deploymentId, Function.identity())); final Map originalNodeById = allNodes.stream().collect(Collectors.toMap(Node::id, Function.identity())); AssignmentPlan.Builder planBuilder = AssignmentPlan.builder(allNodes, deployments); for (AssignmentPlan.Deployment m : planDeployments) { - AssignmentPlan.Deployment originalDeployment = originalModelById.get(m.id()); + AssignmentPlan.Deployment originalDeployment = originalModelById.get(m.deploymentId()); Map nodeAssignments = plan.assignments(m).orElse(Map.of()); for (Map.Entry assignment : nodeAssignments.entrySet()) { Node originalNode = originalNodeById.get(assignment.getKey().id()); @@ -193,12 +206,12 @@ private AssignmentPlan swapOriginalModelsInPlan( return planBuilder.build(); } - private Map> mergeAllocationsByNodeIdByModelId(List plans) { - Map> allocationsByNodeIdByModelId = new HashMap<>(); - deployments.forEach(m -> allocationsByNodeIdByModelId.put(m.id(), new HashMap<>())); + private Map> mergeAllocationsByNodeIdByDeploymentId(List plans) { + Map> allocationsByNodeIdByDeploymentId = new HashMap<>(); + deployments.forEach(d -> allocationsByNodeIdByDeploymentId.put(d.deploymentId(), new HashMap<>())); for (AssignmentPlan plan : plans) { - for (AssignmentPlan.Deployment m : plan.models()) { - Map nodeIdToAllocations = allocationsByNodeIdByModelId.get(m.id()); + for (AssignmentPlan.Deployment m : plan.deployments()) { + Map nodeIdToAllocations = allocationsByNodeIdByDeploymentId.get(m.deploymentId()); 
Optional> assignments = plan.assignments(m); if (assignments.isPresent()) { for (Map.Entry nodeAssignments : assignments.get().entrySet()) { @@ -212,6 +225,6 @@ private Map> mergeAllocationsByNodeIdByModelId(List } } } - return allocationsByNodeIdByModelId; + return allocationsByNodeIdByDeploymentId; } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java index d84c04f0c41f1..3f93c3431d891 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlanTests.java @@ -69,7 +69,7 @@ public void testAssignModelToNode_GivenNoPreviousAssignment() { AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(true)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 1))); } @@ -102,7 +102,7 @@ public void testAssignModelToNode_GivenNoPreviousAssignment() { AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(true)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 1))); } @@ -134,7 +134,7 @@ public void testAssignModelToNode_GivenNewPlanSatisfiesCurrentAssignment() { AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(true)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 1))); } @@ -162,7 +162,7 @@ public void testAssignModelToNode_GivenNewPlanSatisfiesCurrentAssignment() { AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(true)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 1))); @@ -186,7 +186,7 @@ public void testAssignModelToNode_GivenNewPlanDoesNotSatisfyCurrentAssignment() AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(false)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 1))); } @@ -215,7 +215,7 @@ public void testAssignModelToNode_GivenNewPlanDoesNotSatisfyCurrentAssignment() AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(false)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 1))); } @@ -251,7 +251,7 @@ public void testAssignModelToNode_GivenPreviouslyAssignedModelDoesNotFit() { builder.assignModelToNode(m, n, 2); AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(true)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 2))); } @@ -274,7 +274,7 @@ public void testAssignModelToNode_GivenPreviouslyAssignedModelDoesNotFit() { builder.assignModelToNode(m, n, 2); AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); 
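The mergeAllocationsByNodeIdByDeploymentId helper shown above folds the per-zone plans into a single nested map by summing each deployment's allocations per node. A minimal sketch of that fold with plain maps standing in for the plan types (names are illustrative):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class MergeZonePlansSketch {
        public static void main(String[] args) {
            // One allocations-by-node map per zone plan, for a single deployment "d_1";
            // in practice each zone contributes its own distinct nodes.
            List<Map<String, Integer>> perZonePlans = List.of(Map.of("n_1", 2), Map.of("n_2", 1));

            // deploymentId -> nodeId -> allocations, summed across all zone plans.
            Map<String, Map<String, Integer>> merged = new HashMap<>();
            merged.put("d_1", new HashMap<>());
            for (Map<String, Integer> zoneAllocations : perZonePlans) {
                zoneAllocations.forEach((nodeId, count) -> merged.get("d_1").merge(nodeId, count, Integer::sum));
            }

            System.out.println(merged.get("d_1")); // e.g. {n_1=2, n_2=1}
        }
    }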
assertThat(plan.satisfiesCurrentAssignments(), is(true)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 2))); } @@ -355,7 +355,7 @@ public void testAssignModelToNode_GivenSameModelAssignedTwice() { AssignmentPlan plan = builder.build(); - assertThat(plan.models(), contains(m)); + assertThat(plan.deployments(), contains(m)); assertThat(plan.satisfiesCurrentAssignments(), is(true)); assertThat(plan.assignments(m).get(), equalTo(Map.of(n, 3))); } @@ -511,7 +511,7 @@ public void testCompareTo_GivenDifferenceInMemory() { assertThat(planUsingMoreMemory.compareTo(planUsingLessMemory), lessThan(0)); } - public void testSatisfiesAllModels_GivenAllModelsAreSatisfied() { + public void testSatisfiesAllModels_GivenAllDeploymentsAreSatisfied() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); { @@ -602,7 +602,7 @@ public void testSatisfiesAllModels_GivenAllModelsAreSatisfied() { } } - public void testSatisfiesAllModels_GivenOneModelHasOneAllocationLess() { + public void testSatisfiesAllDeployments_GivenOneModelHasOneAllocationLess() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 0, null, 0, 0); @@ -617,7 +617,7 @@ public void testSatisfiesAllModels_GivenOneModelHasOneAllocationLess() { assertThat(plan.satisfiesAllModels(), is(false)); } - public void testArePreviouslyAssignedModelsAssigned_GivenTrue() { + public void testArePreviouslyAssignedDeploymentsAssigned_GivenTrue() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 3, null, 0, 0); @@ -630,7 +630,7 @@ public void testArePreviouslyAssignedModelsAssigned_GivenTrue() { assertThat(plan.arePreviouslyAssignedModelsAssigned(), is(true)); } - public void testArePreviouslyAssignedModelsAssigned_GivenFalse() { + public void testArePreviouslyAssignedDeploymentsAssigned_GivenFalse() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(50).getBytes(), 1, 2, Map.of(), 3, null, 0, 0); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java index ef76c388b81a1..24095600c42d0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java @@ -261,7 +261,7 @@ public void testModelWithMoreAllocationsThanAvailableCores_GivenSingleThreadPerA } } - public void testMultipleModelsAndNodesWithSingleSolution() { + public void testMultipleDeploymentsAndNodesWithSingleSolution() { Node node1 = new Node("n_1", 2 * scaleNodeSize(50), 7); Node node2 = new Node("n_2", 2 * scaleNodeSize(50), 7); Node node3 = new Node("n_3", 2 * scaleNodeSize(50), 2); @@ -316,7 +316,7 @@ public void testMultipleModelsAndNodesWithSingleSolution() { } } - public void 
testMultipleModelsAndNodesWithSingleSolution_NewMemoryFields() { + public void testMultipleDeploymentsAndNodesWithSingleSolution_NewMemoryFields() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(800).getBytes(), 7); Node node2 = new Node("n_2", ByteSizeValue.ofMb(800).getBytes(), 7); Node node3 = new Node("n_3", ByteSizeValue.ofMb(900).getBytes(), 2); @@ -508,7 +508,7 @@ public void testModelWithPreviousAssignmentAndNoMoreCoresAvailable() { assertThat(plan.assignments(deployment).get(), equalTo(Map.of(node, 4))); } - public void testFullCoreUtilization_GivenModelsWithSingleThreadPerAllocation() { + public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocation() { List nodes = List.of( new Node("n_1", ByteSizeValue.ofGb(18).getBytes(), 8), new Node("n_2", ByteSizeValue.ofGb(18).getBytes(), 8), @@ -544,7 +544,7 @@ public void testFullCoreUtilization_GivenModelsWithSingleThreadPerAllocation() { assertPreviousAssignmentsAreSatisfied(deployments, assignmentPlan); } - public void testFullCoreUtilization_GivenModelsWithSingleThreadPerAllocation_NewMemoryFields() { + public void testFullCoreUtilization_GivenDeploymentsWithSingleThreadPerAllocation_NewMemoryFields() { List nodes = List.of( new Node("n_1", ByteSizeValue.ofGb(18).getBytes(), 8), new Node("n_2", ByteSizeValue.ofGb(18).getBytes(), 8), @@ -641,32 +641,32 @@ public void testFullCoreUtilization_GivenModelsWithSingleThreadPerAllocation_New assertPreviousAssignmentsAreSatisfied(deployments, assignmentPlan); } - public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenNodesJustUnderLimit() { - runTooManyNodesAndModels(3161, 1); + public void testTooManyNodesAndDeployments_DoesNotThrowOOM_GivenNodesJustUnderLimit() { + runTooManyNodesAndDeployments(3161, 1); } - public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenNodesJustOverLimit() { - runTooManyNodesAndModels(3162, 1); + public void testTooManyNodesAndDeployments_DoesNotThrowOOM_GivenNodesJustOverLimit() { + runTooManyNodesAndDeployments(3162, 1); } - public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenModelsJustUnderLimit() { - runTooManyNodesAndModels(1, 3161); + public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenDeploymentsJustUnderLimit() { + runTooManyNodesAndDeployments(1, 3161); } - public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenModelsJustOverLimit() { - runTooManyNodesAndModels(1, 3162); + public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenDeploymentsJustOverLimit() { + runTooManyNodesAndDeployments(1, 3162); } - public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenComboJustUnderLimit() { - runTooManyNodesAndModels(170, 171); + public void testTooManyNodesAndDeployments_DoesNotThrowOOM_GivenComboJustUnderLimit() { + runTooManyNodesAndDeployments(170, 171); } - public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenComboJustOverLimit() { - runTooManyNodesAndModels(171, 171); + public void testTooManyNodesAndDeployments_DoesNotThrowOOM_GivenComboJustOverLimit() { + runTooManyNodesAndDeployments(171, 171); } - public void testTooManyNodesAndModels_DoesNotThrowOOM_GivenComboWayOverLimit() { - runTooManyNodesAndModels(1000, 1000); + public void testTooManyNodesAndDeployments_DoesNotThrowOOM_GivenComboWayOverLimit() { + runTooManyNodesAndDeployments(1000, 1000); } public void testRandomBenchmark() { @@ -679,7 +679,7 @@ public void testRandomBenchmark() { int scale = randomIntBetween(0, 10); double load = randomDoubleBetween(0.1, 1.0, true); List nodes = randomNodes(scale); - List deployments = randomModels(scale, 
load); + List deployments = randomDeployments(scale, load); nodeSizes.add(nodes.size()); modelSizes.add(deployments.size()); logger.debug("Nodes = " + nodes.size() + "; Models = " + deployments.size()); @@ -719,7 +719,7 @@ public void testPreviousAssignmentsGetAtLeastAsManyAllocationsAfterAddingNewMode int scale = randomIntBetween(0, 10); double load = randomDoubleBetween(0.1, 1.0, true); List nodes = randomNodes(scale); - List deployments = randomModels(scale, load); + List deployments = randomDeployments(scale, load); AssignmentPlan originalPlan = new AssignmentPlanner(nodes, deployments).computePlan(); List previousModelsPlusNew = new ArrayList<>(deployments.size() + 1); @@ -730,7 +730,7 @@ public void testPreviousAssignmentsGetAtLeastAsManyAllocationsAfterAddingNewMode .collect(Collectors.toMap(e -> e.getKey().id(), Map.Entry::getValue)); previousModelsPlusNew.add( new AssignmentPlan.Deployment( - m.id(), + m.deploymentId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), @@ -827,7 +827,7 @@ public void testModelWithoutCurrentAllocationsGetsAssignedIfAllocatedPreviously( assertThat(assignmentPlan.getRemainingNodeMemory("n_2"), greaterThanOrEqualTo(0L)); } - public void testGivenPreviouslyAssignedModels_CannotAllBeAllocated() { + public void testGivenPreviouslyAssignedDeployments_CannotAllBeAllocated() { Node node1 = new Node("n_1", scaleNodeSize(ByteSizeValue.ofGb(2).getMb()), 2); AssignmentPlan.Deployment deployment1 = new Deployment("m_1", ByteSizeValue.ofMb(1200).getBytes(), 1, 1, Map.of(), 1, null, 0, 0); AssignmentPlan.Deployment deployment2 = new Deployment("m_2", ByteSizeValue.ofMb(1100).getBytes(), 1, 1, Map.of(), 1, null, 0, 0); @@ -854,7 +854,7 @@ public void testGivenClusterResize_AllocationShouldNotExceedMemoryConstraints() // Then start m_2 assignmentPlan = new AssignmentPlanner( List.of(node1, node2), - Stream.concat(createModelsFromPlan(assignmentPlan).stream(), Stream.of(deployment2)).toList() + Stream.concat(createDeploymentsFromPlan(assignmentPlan).stream(), Stream.of(deployment2)).toList() ).computePlan(); indexedBasedPlan = convertToIdIndexed(assignmentPlan); @@ -865,7 +865,7 @@ public void testGivenClusterResize_AllocationShouldNotExceedMemoryConstraints() // Then start m_3 assignmentPlan = new AssignmentPlanner( List.of(node1, node2), - Stream.concat(createModelsFromPlan(assignmentPlan).stream(), Stream.of(deployment3)).toList() + Stream.concat(createDeploymentsFromPlan(assignmentPlan).stream(), Stream.of(deployment3)).toList() ).computePlan(); indexedBasedPlan = convertToIdIndexed(assignmentPlan); @@ -875,7 +875,7 @@ public void testGivenClusterResize_AllocationShouldNotExceedMemoryConstraints() assertThat(indexedBasedPlan.get("m_3"), equalTo(Map.of("n_2", 1))); // First, one node goes away. 
- assignmentPlan = new AssignmentPlanner(List.of(node1), createModelsFromPlan(assignmentPlan)).computePlan(); + assignmentPlan = new AssignmentPlanner(List.of(node1), createDeploymentsFromPlan(assignmentPlan)).computePlan(); assertThat(assignmentPlan.getRemainingNodeMemory("n_1"), greaterThanOrEqualTo(0L)); } @@ -896,7 +896,7 @@ public void testGivenClusterResize_ShouldAllocateEachModelAtLeastOnce() { // Then start m_2 assignmentPlan = new AssignmentPlanner( List.of(node1, node2), - Stream.concat(createModelsFromPlan(assignmentPlan).stream(), Stream.of(deployment2)).toList() + Stream.concat(createDeploymentsFromPlan(assignmentPlan).stream(), Stream.of(deployment2)).toList() ).computePlan(); indexedBasedPlan = convertToIdIndexed(assignmentPlan); @@ -907,7 +907,7 @@ public void testGivenClusterResize_ShouldAllocateEachModelAtLeastOnce() { // Then start m_3 assignmentPlan = new AssignmentPlanner( List.of(node1, node2), - Stream.concat(createModelsFromPlan(assignmentPlan).stream(), Stream.of(deployment3)).toList() + Stream.concat(createDeploymentsFromPlan(assignmentPlan).stream(), Stream.of(deployment3)).toList() ).computePlan(); indexedBasedPlan = convertToIdIndexed(assignmentPlan); @@ -921,20 +921,20 @@ public void testGivenClusterResize_ShouldAllocateEachModelAtLeastOnce() { Node node4 = new Node("n_4", ByteSizeValue.ofMb(2600).getBytes(), 2); // First, one node goes away. - assignmentPlan = new AssignmentPlanner(List.of(node1), createModelsFromPlan(assignmentPlan)).computePlan(); + assignmentPlan = new AssignmentPlanner(List.of(node1), createDeploymentsFromPlan(assignmentPlan)).computePlan(); assertThat(assignmentPlan.getRemainingNodeMemory(node1.id()), greaterThanOrEqualTo(0L)); // Then, a node double in memory size is added. - assignmentPlan = new AssignmentPlanner(List.of(node1, node3), createModelsFromPlan(assignmentPlan)).computePlan(); + assignmentPlan = new AssignmentPlanner(List.of(node1, node3), createDeploymentsFromPlan(assignmentPlan)).computePlan(); assertThat(assignmentPlan.getRemainingNodeMemory(node1.id()), greaterThanOrEqualTo(0L)); assertThat(assignmentPlan.getRemainingNodeMemory(node3.id()), greaterThanOrEqualTo(0L)); // And another. 
- assignmentPlan = new AssignmentPlanner(List.of(node1, node3, node4), createModelsFromPlan(assignmentPlan)).computePlan(); + assignmentPlan = new AssignmentPlanner(List.of(node1, node3, node4), createDeploymentsFromPlan(assignmentPlan)).computePlan(); assertThat(assignmentPlan.getRemainingNodeMemory(node1.id()), greaterThanOrEqualTo(0L)); assertThat(assignmentPlan.getRemainingNodeMemory(node3.id()), greaterThanOrEqualTo(0L)); assertThat(assignmentPlan.getRemainingNodeMemory(node4.id()), greaterThanOrEqualTo(0L)); // Finally, the remaining smaller node is removed - assignmentPlan = new AssignmentPlanner(List.of(node3, node4), createModelsFromPlan(assignmentPlan)).computePlan(); + assignmentPlan = new AssignmentPlanner(List.of(node3, node4), createDeploymentsFromPlan(assignmentPlan)).computePlan(); assertThat(assignmentPlan.getRemainingNodeMemory(node3.id()), greaterThanOrEqualTo(0L)); assertThat(assignmentPlan.getRemainingNodeMemory(node4.id()), greaterThanOrEqualTo(0L)); @@ -949,7 +949,7 @@ public void testGivenClusterResize_ShouldAllocateEachModelAtLeastOnce() { assertThat(assignmentPlan.getRemainingNodeCores("n_2"), equalTo(0)); } - public void testGivenClusterResize_ShouldRemoveAllocatedModels() { + public void testGivenClusterResize_ShouldRemoveAllocatedDeployments() { // Ensure that plan is removing previously allocated models if not enough memory is available Node node1 = new Node("n_1", ByteSizeValue.ofMb(1840).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofMb(2580).getBytes(), 2); @@ -969,14 +969,14 @@ public void testGivenClusterResize_ShouldRemoveAllocatedModels() { assertThat(assignmentPlan.getRemainingNodeMemory(node2.id()), greaterThanOrEqualTo(0L)); // Now the cluster starts getting resized. Ensure that resources are not over-allocated. - assignmentPlan = new AssignmentPlanner(List.of(node1), createModelsFromPlan(assignmentPlan)).computePlan(); + assignmentPlan = new AssignmentPlanner(List.of(node1), createDeploymentsFromPlan(assignmentPlan)).computePlan(); assertThat(indexedBasedPlan.get("m_1"), equalTo(Map.of("n_1", 2))); assertThat(assignmentPlan.getRemainingNodeMemory(node1.id()), greaterThanOrEqualTo(0L)); assertThat(assignmentPlan.getRemainingNodeCores(node1.id()), greaterThanOrEqualTo(0)); } - public void testGivenClusterResize_ShouldRemoveAllocatedModels_NewMemoryFields() { + public void testGivenClusterResize_ShouldRemoveAllocatedDeployments_NewMemoryFields() { // Ensure that plan is removing previously allocated models if not enough memory is available Node node1 = new Node("n_1", ByteSizeValue.ofMb(700).getBytes(), 2); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 2); @@ -1026,16 +1026,16 @@ public void testGivenClusterResize_ShouldRemoveAllocatedModels_NewMemoryFields() assertThat(assignmentPlan.getRemainingNodeMemory(node2.id()), greaterThanOrEqualTo(0L)); // Now the cluster starts getting resized. Ensure that resources are not over-allocated. 
- assignmentPlan = new AssignmentPlanner(List.of(node1), createModelsFromPlan(assignmentPlan)).computePlan(); + assignmentPlan = new AssignmentPlanner(List.of(node1), createDeploymentsFromPlan(assignmentPlan)).computePlan(); assertThat(indexedBasedPlan.get("m_1"), equalTo(Map.of("n_1", 2))); assertThat(assignmentPlan.getRemainingNodeMemory(node1.id()), greaterThanOrEqualTo(0L)); assertThat(assignmentPlan.getRemainingNodeCores(node1.id()), greaterThanOrEqualTo(0)); } - public static List createModelsFromPlan(AssignmentPlan plan) { + public static List createDeploymentsFromPlan(AssignmentPlan plan) { List deployments = new ArrayList<>(); - for (Deployment m : plan.models()) { + for (Deployment m : plan.deployments()) { Optional> assignments = plan.assignments(m); Map currentAllocations = Map.of(); if (assignments.isPresent()) { @@ -1047,7 +1047,7 @@ public static List createModelsFromPlan(AssignmentPlan plan) { int totalAllocations = currentAllocations.values().stream().mapToInt(Integer::intValue).sum(); deployments.add( new Deployment( - m.id(), + m.deploymentId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), @@ -1064,13 +1064,13 @@ public static List createModelsFromPlan(AssignmentPlan plan) { public static Map> convertToIdIndexed(AssignmentPlan plan) { Map> result = new HashMap<>(); - for (AssignmentPlan.Deployment m : plan.models()) { + for (AssignmentPlan.Deployment m : plan.deployments()) { Optional> assignments = plan.assignments(m); Map allocationsPerNodeId = assignments.isPresent() ? new HashMap<>() : Map.of(); for (Map.Entry nodeAssignments : assignments.orElse(Map.of()).entrySet()) { allocationsPerNodeId.put(nodeAssignments.getKey().id(), nodeAssignments.getValue()); } - result.put(m.id(), allocationsPerNodeId); + result.put(m.deploymentId(), allocationsPerNodeId); } return result; } @@ -1103,7 +1103,7 @@ public static List randomNodes(int scale, String nodeIdPrefix) { return nodes; } - public static List randomModels(int scale, double load) { + public static List randomDeployments(int scale, double load) { List deployments = new ArrayList<>(); for (int i = 0; i < Math.max(2, Math.round(load * (1 + 8 * scale))); i++) { deployments.add(randomModel(String.valueOf(i))); @@ -1158,7 +1158,7 @@ public static void assertPreviousAssignmentsAreSatisfied(List nodes = new ArrayList<>(); for (int i = 0; i < nodesSize; i++) { nodes.add(new Node("n_" + i, ByteSizeValue.ofGb(6).getBytes(), 100)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java index 9885c4d583198..7499470cc8d6f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java @@ -83,13 +83,13 @@ public void testGivenPreviousAssignments() { List modelsPreservingAllocations = preserveAllAllocations.modelsPreservingAllocations(); assertThat(modelsPreservingAllocations, hasSize(2)); - assertThat(modelsPreservingAllocations.get(0).id(), equalTo("m_1")); + assertThat(modelsPreservingAllocations.get(0).deploymentId(), equalTo("m_1")); assertThat(modelsPreservingAllocations.get(0).memoryBytes(), equalTo(ByteSizeValue.ofMb(30).getBytes())); assertThat(modelsPreservingAllocations.get(0).allocations(), equalTo(1)); 
assertThat(modelsPreservingAllocations.get(0).threadsPerAllocation(), equalTo(1)); assertThat(modelsPreservingAllocations.get(0).currentAllocationsByNodeId(), equalTo(Map.of("n_1", 0))); - assertThat(modelsPreservingAllocations.get(1).id(), equalTo("m_2")); + assertThat(modelsPreservingAllocations.get(1).deploymentId(), equalTo("m_2")); assertThat(modelsPreservingAllocations.get(1).memoryBytes(), equalTo(ByteSizeValue.ofMb(50).getBytes())); assertThat(modelsPreservingAllocations.get(1).allocations(), equalTo(3)); assertThat(modelsPreservingAllocations.get(1).threadsPerAllocation(), equalTo(4)); @@ -166,7 +166,7 @@ public void testGivenPreviousAssignments() { List modelsPreservingAllocations = preserveAllAllocations.modelsPreservingAllocations(); assertThat(modelsPreservingAllocations, hasSize(2)); - assertThat(modelsPreservingAllocations.get(0).id(), equalTo("m_1")); + assertThat(modelsPreservingAllocations.get(0).deploymentId(), equalTo("m_1")); assertThat(modelsPreservingAllocations.get(0).memoryBytes(), equalTo(ByteSizeValue.ofMb(30).getBytes())); assertThat(modelsPreservingAllocations.get(0).perDeploymentMemoryBytes(), equalTo(ByteSizeValue.ofMb(300).getBytes())); assertThat(modelsPreservingAllocations.get(0).perAllocationMemoryBytes(), equalTo(ByteSizeValue.ofMb(10).getBytes())); @@ -174,7 +174,7 @@ public void testGivenPreviousAssignments() { assertThat(modelsPreservingAllocations.get(0).threadsPerAllocation(), equalTo(1)); assertThat(modelsPreservingAllocations.get(0).currentAllocationsByNodeId(), equalTo(Map.of("n_1", 0))); - assertThat(modelsPreservingAllocations.get(1).id(), equalTo("m_2")); + assertThat(modelsPreservingAllocations.get(1).deploymentId(), equalTo("m_2")); assertThat(modelsPreservingAllocations.get(1).memoryBytes(), equalTo(ByteSizeValue.ofMb(50).getBytes())); assertThat(modelsPreservingAllocations.get(1).perDeploymentMemoryBytes(), equalTo(ByteSizeValue.ofMb(300).getBytes())); assertThat(modelsPreservingAllocations.get(1).perAllocationMemoryBytes(), equalTo(ByteSizeValue.ofMb(10).getBytes())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java index 50ba8763c690d..bc95fb1e0339e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java @@ -77,7 +77,7 @@ public void testGivenPreviousAssignments() { List modelsPreservingAllocations = preserveOneAllocation.modelsPreservingAllocations(); assertThat(modelsPreservingAllocations, hasSize(2)); - assertThat(modelsPreservingAllocations.get(0).id(), equalTo("m_1")); + assertThat(modelsPreservingAllocations.get(0).deploymentId(), equalTo("m_1")); assertThat(modelsPreservingAllocations.get(0).memoryBytes(), equalTo(ByteSizeValue.ofMb(30).getBytes())); assertThat(modelsPreservingAllocations.get(0).perDeploymentMemoryBytes(), equalTo(ByteSizeValue.ofMb(0).getBytes())); assertThat(modelsPreservingAllocations.get(0).perAllocationMemoryBytes(), equalTo(ByteSizeValue.ofMb(0).getBytes())); @@ -85,7 +85,7 @@ public void testGivenPreviousAssignments() { assertThat(modelsPreservingAllocations.get(0).threadsPerAllocation(), equalTo(1)); assertThat(modelsPreservingAllocations.get(0).currentAllocationsByNodeId(), equalTo(Map.of("n_1", 0))); - 
assertThat(modelsPreservingAllocations.get(1).id(), equalTo("m_2")); + assertThat(modelsPreservingAllocations.get(1).deploymentId(), equalTo("m_2")); assertThat(modelsPreservingAllocations.get(1).memoryBytes(), equalTo(ByteSizeValue.ofMb(50).getBytes())); assertThat(modelsPreservingAllocations.get(1).perDeploymentMemoryBytes(), equalTo(ByteSizeValue.ofMb(0).getBytes())); assertThat(modelsPreservingAllocations.get(1).perAllocationMemoryBytes(), equalTo(ByteSizeValue.ofMb(0).getBytes())); @@ -165,7 +165,7 @@ public void testGivenPreviousAssignments() { List modelsPreservingAllocations = preserveOneAllocation.modelsPreservingAllocations(); assertThat(modelsPreservingAllocations, hasSize(2)); - assertThat(modelsPreservingAllocations.get(0).id(), equalTo("m_1")); + assertThat(modelsPreservingAllocations.get(0).deploymentId(), equalTo("m_1")); assertThat(modelsPreservingAllocations.get(0).memoryBytes(), equalTo(ByteSizeValue.ofMb(30).getBytes())); assertThat(modelsPreservingAllocations.get(0).perDeploymentMemoryBytes(), equalTo(ByteSizeValue.ofMb(300).getBytes())); assertThat(modelsPreservingAllocations.get(0).perAllocationMemoryBytes(), equalTo(ByteSizeValue.ofMb(10).getBytes())); @@ -173,7 +173,7 @@ public void testGivenPreviousAssignments() { assertThat(modelsPreservingAllocations.get(0).threadsPerAllocation(), equalTo(1)); assertThat(modelsPreservingAllocations.get(0).currentAllocationsByNodeId(), equalTo(Map.of("n_1", 0))); - assertThat(modelsPreservingAllocations.get(1).id(), equalTo("m_2")); + assertThat(modelsPreservingAllocations.get(1).deploymentId(), equalTo("m_2")); assertThat(modelsPreservingAllocations.get(1).memoryBytes(), equalTo(ByteSizeValue.ofMb(50).getBytes())); assertThat(modelsPreservingAllocations.get(1).perDeploymentMemoryBytes(), equalTo(ByteSizeValue.ofMb(300).getBytes())); assertThat(modelsPreservingAllocations.get(1).perAllocationMemoryBytes(), equalTo(ByteSizeValue.ofMb(10).getBytes())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java index 4993600d0d3b3..7005ad959577b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlannerTests.java @@ -22,9 +22,9 @@ import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.assertModelFullyAssignedToNode; import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.assertPreviousAssignmentsAreSatisfied; import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.convertToIdIndexed; -import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.createModelsFromPlan; +import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.createDeploymentsFromPlan; +import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.randomDeployments; import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.randomModel; -import static org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.randomModels; import static 
org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlannerTests.randomNodes; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -138,6 +138,33 @@ public void testGivenOneModel_OneNodePerZone_TwoZones_FullyFits() { assertThat(indexedBasedPlan.get("m_1"), equalTo(Map.of("n_1", 1, "n_2", 1))); } + public void testGivenOneModel_OneLargeNodePerZone_TwoZones_FullyFits() { + Node node1 = new Node("n_1", ByteSizeValue.ofGb(16).getBytes(), 8); + Node node2 = new Node("n_2", ByteSizeValue.ofGb(16).getBytes(), 8); + AssignmentPlan.Deployment deployment = new AssignmentPlan.Deployment( + "m_1", + ByteSizeValue.ofMb(100).getBytes(), + 4, + 2, + Map.of(), + 0, + null, + 0, + 0 + ); + + AssignmentPlan plan = new ZoneAwareAssignmentPlanner( + Map.of(List.of("z_1"), List.of(node1), List.of("z_2"), List.of(node2)), + List.of(deployment) + ).computePlan(); + + assertThat(plan.satisfiesAllModels(), is(true)); + + Map> indexedBasedPlan = convertToIdIndexed(plan); + assertThat(indexedBasedPlan.keySet(), hasItems("m_1")); + assertThat(indexedBasedPlan.get("m_1"), equalTo(Map.of("n_1", 2, "n_2", 2))); + } + public void testGivenOneModel_OneNodePerZone_TwoZones_PartiallyFits() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(440).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(440).getBytes(), 4); @@ -166,7 +193,7 @@ public void testGivenOneModel_OneNodePerZone_TwoZones_PartiallyFits() { assertThat(plan.getRemainingNodeMemory("n_2"), equalTo(0L)); } - public void testGivenThreeModels_TwoNodesPerZone_ThreeZones_FullyFit() { + public void testGivenThreeDeployments_TwoNodesPerZone_ThreeZones_FullyFit() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node3 = new Node("n_3", ByteSizeValue.ofMb(1000).getBytes(), 4); @@ -217,7 +244,7 @@ public void testGivenThreeModels_TwoNodesPerZone_ThreeZones_FullyFit() { } } - public void testGivenTwoModelsWithSingleAllocation_OneNode_ThreeZones() { + public void testGivenTwoDeploymentsWithSingleAllocation_OneNode_ThreeZones() { Node node1 = new Node("n_1", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node2 = new Node("n_2", ByteSizeValue.ofMb(1000).getBytes(), 4); Node node3 = new Node("n_3", ByteSizeValue.ofMb(1000).getBytes(), 4); @@ -243,7 +270,7 @@ public void testPreviousAssignmentsGetAtLeastAsManyAllocationsAfterAddingNewMode List.of("z_3"), randomNodes(scale, "z_3_") ); - List deployments = randomModels(scale, load); + List deployments = randomDeployments(scale, load); AssignmentPlan originalPlan = new ZoneAwareAssignmentPlanner(nodesByZone, deployments).computePlan(); List previousModelsPlusNew = new ArrayList<>(deployments.size() + 1); @@ -254,7 +281,7 @@ public void testPreviousAssignmentsGetAtLeastAsManyAllocationsAfterAddingNewMode .collect(Collectors.toMap(e -> e.getKey().id(), Map.Entry::getValue)); previousModelsPlusNew.add( new AssignmentPlan.Deployment( - m.id(), + m.deploymentId(), m.memoryBytes(), m.allocations(), m.threadsPerAllocation(), @@ -291,7 +318,7 @@ public void testGivenClusterResize_GivenOneZone_ShouldAllocateEachModelAtLeastOn // Then start m_2 assignmentPlan = new ZoneAwareAssignmentPlanner( Map.of(List.of(), List.of(node1, node2)), - Stream.concat(createModelsFromPlan(assignmentPlan).stream(), Stream.of(deployment2)).toList() + Stream.concat(createDeploymentsFromPlan(assignmentPlan).stream(), Stream.of(deployment2)).toList() ).computePlan(); indexedBasedPlan = 
convertToIdIndexed(assignmentPlan); @@ -302,7 +329,7 @@ public void testGivenClusterResize_GivenOneZone_ShouldAllocateEachModelAtLeastOn // Then start m_3 assignmentPlan = new ZoneAwareAssignmentPlanner( Map.of(List.of(), List.of(node1, node2)), - Stream.concat(createModelsFromPlan(assignmentPlan).stream(), Stream.of(deployment3)).toList() + Stream.concat(createDeploymentsFromPlan(assignmentPlan).stream(), Stream.of(deployment3)).toList() ).computePlan(); indexedBasedPlan = convertToIdIndexed(assignmentPlan); @@ -316,19 +343,19 @@ public void testGivenClusterResize_GivenOneZone_ShouldAllocateEachModelAtLeastOn Node node4 = new Node("n_4", ByteSizeValue.ofMb(5160).getBytes(), 2); // First, one node goes away. - assignmentPlan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node1)), createModelsFromPlan(assignmentPlan)) + assignmentPlan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node1)), createDeploymentsFromPlan(assignmentPlan)) .computePlan(); // Then, a node double in memory size is added. - assignmentPlan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node1, node3)), createModelsFromPlan(assignmentPlan)) + assignmentPlan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node1, node3)), createDeploymentsFromPlan(assignmentPlan)) .computePlan(); // And another. assignmentPlan = new ZoneAwareAssignmentPlanner( Map.of(List.of(), List.of(node1, node3, node4)), - createModelsFromPlan(assignmentPlan) + createDeploymentsFromPlan(assignmentPlan) ).computePlan(); // Finally, the remaining smaller node is removed - assignmentPlan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node3, node4)), createModelsFromPlan(assignmentPlan)) + assignmentPlan = new ZoneAwareAssignmentPlanner(Map.of(List.of(), List.of(node3, node4)), createDeploymentsFromPlan(assignmentPlan)) .computePlan(); indexedBasedPlan = convertToIdIndexed(assignmentPlan); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java index 5f8744ace090d..458dee693c80a 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleDownsamplingSecurityIT.java @@ -342,7 +342,7 @@ private void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings, null, lifecycle)) + .template(Template.builder().settings(settings).mappings(mappings).lifecycle(lifecycle)) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() @@ -442,7 +442,7 @@ public Collection getSystemDataStreamDescriptors() { SystemDataStreamDescriptor.Type.EXTERNAL, ComposableIndexTemplate.builder() .indexPatterns(List.of(SYSTEM_DATA_STREAM_NAME)) - .template(new Template(settings.build(), getTSDBMappings(), null, LIFECYCLE)) + .template(Template.builder().settings(settings).mappings(getTSDBMappings()).lifecycle(LIFECYCLE)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(), Map.of(), diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java index 2ab51bece41ea..2c98d2e686e46 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamLifecycleServiceRuntimeSecurityIT.java @@ -227,7 +227,12 @@ private static void putComposableIndexTemplate( request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(patterns) - .template(new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle)) + .template( + Template.builder() + .settings(settings) + .mappings(mappings == null ? null : CompressedXContent.fromJSON(mappings)) + .lifecycle(lifecycle) + ) .metadata(metadata) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() @@ -266,7 +271,7 @@ public Collection getSystemDataStreamDescriptors() { SystemDataStreamDescriptor.Type.EXTERNAL, ComposableIndexTemplate.builder() .indexPatterns(List.of(SYSTEM_DATA_STREAM_NAME)) - .template(new Template(Settings.EMPTY, null, null, DataStreamLifecycle.newBuilder().dataRetention(0).build())) + .template(Template.builder().lifecycle(DataStreamLifecycle.newBuilder().dataRetention(0))) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(), Map.of(), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/InitialNodeSecurityAutoConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/InitialNodeSecurityAutoConfiguration.java index e84f6f3efeadb..192a5a1b8bb15 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/InitialNodeSecurityAutoConfiguration.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/InitialNodeSecurityAutoConfiguration.java @@ -11,11 +11,11 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.bootstrap.ConsoleLoader; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.core.TimeValue; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 7c09adc276c3e..4f7ba7808b823 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -21,7 +21,6 @@ import org.elasticsearch.action.DocWriteRequest.OpType; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteResponse.Result; -import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -41,6 +40,7 @@ import org.elasticsearch.cluster.ClusterState; import 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
index 7c09adc276c3e..4f7ba7808b823 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
@@ -21,7 +21,6 @@
 import org.elasticsearch.action.DocWriteRequest.OpType;
 import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.action.DocWriteResponse.Result;
-import org.elasticsearch.action.bulk.BackoffPolicy;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
@@ -41,6 +40,7 @@
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateUpdateTask;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.BackoffPolicy;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoader.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoader.java
index 063cc85ea0187..65dcfc26ae8fa 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoader.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoader.java
@@ -146,7 +146,7 @@ private void handleReloadedContentAndJwksAlgs(byte[] bytes) {
        assert newContentAndJwksAlgs != null;
        assert contentAndJwksAlgs != null;
        if ((Arrays.equals(contentAndJwksAlgs.sha256, newContentAndJwksAlgs.sha256)) == false) {
-            logger.debug(
+            logger.info(
                "Reloaded JWK set from sha256=[{}] to sha256=[{}]",
                MessageDigests.toHexString(contentAndJwksAlgs.sha256),
                MessageDigests.toHexString(newContentAndJwksAlgs.sha256)
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
index dd2377ec773c4..b347ceb833f64 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.DocWriteResponse;
-import org.elasticsearch.action.bulk.BackoffPolicy;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.get.GetRequest;
@@ -37,6 +36,7 @@
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.OriginSettingClient;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.BackoffPolicy;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.Lucene;
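Editor's note: the `JwkSetLoader` hunk above promotes the reload message from `debug` to `info`, making key rotation visible at default log levels; the guard around it only fires when the SHA-256 of the reloaded bytes differs from the cached digest. A plain-JDK sketch of that change-detection pattern (class, field, and method names here are hypothetical):

```java
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.HexFormat;

class JwkReloadSketch {
    private byte[] currentSha256 = new byte[0];

    // Returns true (and records the new digest) only when the content changed.
    boolean contentChanged(byte[] reloadedBytes) throws NoSuchAlgorithmException {
        byte[] newSha256 = MessageDigest.getInstance("SHA-256").digest(reloadedBytes);
        if (Arrays.equals(currentSha256, newSha256) == false) {
            System.out.printf(
                "Reloaded JWK set from sha256=[%s] to sha256=[%s]%n",
                HexFormat.of().formatHex(currentSha256),
                HexFormat.of().formatHex(newSha256)
            );
            currentSha256 = newSha256;
            return true;
        }
        return false;
    }
}
```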
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
index 3a39d54567726..29b8037de5a66 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
@@ -379,7 +379,7 @@ protected Function<Client, Client> getClientWrapper() {
        // user. This is ok for internal n2n stuff but the test framework does other things like wiping indices, repositories, etc
        // that the system user cannot do. So we wrap the node client with a user that can do these things since the client() calls
        // return a node client
-        return client -> (client instanceof NodeClient) ? client.filterWithHeader(headers) : client;
+        return client -> asInstanceOf(NodeClient.class, client).filterWithHeader(headers);
    }

    /**
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java
index 0a1f5f801143d..dd6c41b0a10eb 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java
@@ -13,11 +13,11 @@
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction;
-import org.elasticsearch.action.bulk.BackoffPolicy;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
+import org.elasticsearch.common.BackoffPolicy;
 import org.elasticsearch.common.settings.MockSecureSettings;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java
index 72de6c99191cc..6d7822e5619cc 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java
@@ -8,6 +8,7 @@
 import org.elasticsearch.action.fieldcaps.FieldCapabilities;
 import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
+import org.elasticsearch.action.fieldcaps.FieldCapsUtils;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.util.Maps;
@@ -466,8 +467,9 @@ public void testMergeObjectUnsupportedTypes() throws Exception {
    private static FieldCapabilitiesResponse readFieldCapsResponse(String resourceName) throws IOException {
        InputStream stream = IndexResolverTests.class.getResourceAsStream("/" + resourceName);
        BytesReference ref = Streams.readFully(stream);
-        XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, ref, XContentType.JSON);
-        return FieldCapabilitiesResponse.fromXContent(parser);
+        try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, ref, XContentType.JSON)) {
+            return FieldCapsUtils.parseFieldCapsResponse(parser);
+        }
    }

    public static IndexResolution merge(EsIndex... indices) {
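Editor's note: the `IndexResolverTests` hunk above replaces the removed `FieldCapabilitiesResponse.fromXContent` with `FieldCapsUtils.parseFieldCapsResponse` and, just as importantly, closes the parser with try-with-resources so it is released even if parsing throws. A condensed sketch of the resulting pattern, assuming the import locations implied by the hunks:

```java
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.fieldcaps.FieldCapsUtils;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;

import java.io.IOException;

class FieldCapsParseSketch {
    static FieldCapabilitiesResponse parse(BytesReference ref) throws IOException {
        // XContentParser is Closeable; try-with-resources guarantees cleanup.
        try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, ref, XContentType.JSON)) {
            return FieldCapsUtils.parseFieldCapsResponse(parser);
        }
    }
}
```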
diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
index 7abce10a82f3c..4b9ccff6f526c 100644
--- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
+++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
@@ -408,11 +408,11 @@ public void testSearchResultsVersusKeywordField() throws IOException {
        SearchExecutionContext searchExecutionContext = createMockContext();

        FieldSortBuilder wildcardSortBuilder = new FieldSortBuilder(WILDCARD_FIELD_NAME);
-        SortField wildcardSortField = wildcardSortBuilder.build(searchExecutionContext).field;
+        SortField wildcardSortField = wildcardSortBuilder.build(searchExecutionContext).field();
        ScoreDoc[] wildcardHits = searcher.search(new MatchAllDocsQuery(), numDocs, new Sort(wildcardSortField)).scoreDocs;

        FieldSortBuilder keywordSortBuilder = new FieldSortBuilder(KEYWORD_FIELD_NAME);
-        SortField keywordSortField = keywordSortBuilder.build(searchExecutionContext).field;
+        SortField keywordSortField = keywordSortBuilder.build(searchExecutionContext).field();
        ScoreDoc[] keywordHits = searcher.search(new MatchAllDocsQuery(), numDocs, new Sort(keywordSortField)).scoreDocs;

        assertThat(wildcardHits.length, equalTo(keywordHits.length));
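Editor's note: the final hunk tracks an accessor change: the object returned by `FieldSortBuilder#build` now exposes its `SortField` through a `field()` method (record-style) instead of a public `field` member. An illustrative helper using the new accessor; the searcher and `SearchExecutionContext` wiring is assumed to exist as in the test above:

```java
import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.sort.FieldSortBuilder;

class SortAccessorSketch {
    // Return the top `size` docs sorted by `fieldName`, mirroring how the test
    // compares wildcard and keyword sort results.
    static ScoreDoc[] topDocsSortedBy(IndexSearcher searcher, SearchExecutionContext context, String fieldName, int size)
        throws IOException {
        SortField sortField = new FieldSortBuilder(fieldName).build(context).field(); // was: .field
        return searcher.search(new MatchAllDocsQuery(), size, new Sort(sortField)).scoreDocs;
    }
}
```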