forked from opensearch-project/OpenSearch
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge branch 'main' into comp-agg-optim
- Loading branch information
Showing
71 changed files
with
6,356 additions
and
383 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
70 changes: 70 additions & 0 deletions
70
...yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
# integration tests for queries with specific analysis chains

---
"match query with stacked stems":
  - skip:
      version: " - 2.11.99"
      reason: "match_only_text was added in 2.12"
  # Tests the match query stemmed tokens are "stacked" on top of the unstemmed
  # versions in the same position.
  - do:
      indices.create:
        index: test
        body:
          settings:
            number_of_shards: 1
            number_of_replicas: 1
            analysis:
              analyzer:
                # index-time analyzer: plain lowercased tokens only
                index:
                  tokenizer: standard
                  filter: [lowercase]
                # search-time analyzer: stacks the Porter stem on top of the
                # original token at the same position, de-duplicated.
                # NOTE(review): a stray `rest_total_hits_as_int: true` had been
                # pasted inside this analyzer definition — that flag is a search
                # request option, not an analyzer setting, and was removed here.
                search:
                  tokenizer: standard
                  filter: [lowercase, keyword_repeat, porter_stem, unique_stem]
              filter:
                unique_stem:
                  type: unique
                  only_on_same_position: true
          mappings:
            properties:
              text:
                type: match_only_text
                analyzer: index
                search_analyzer: search

  - do:
      index:
        index: test
        id: 1
        body: { "text": "the fox runs across the street" }
        refresh: true

  # "fox runs" stems to fox/run, which stack on the unstemmed tokens, so the
  # AND match still finds the single document.
  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              text:
                query: fox runs
                operator: AND
  - match: {hits.total: 1}

  - do:
      index:
        index: test
        id: 2
        body: { "text": "run fox run" }
        refresh: true

  # Both documents match once stems are stacked at the same positions.
  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              text:
                query: fox runs
                operator: AND
  - match: {hits.total: 2}
144 changes: 144 additions & 0 deletions
144
...tTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,144 @@ | ||
"ngram search":
  - skip:
      version: " - 2.11.99"
      reason: "match_only_text was added in 2.12"
  - do:
      indices.create:
        index: test
        body:
          settings:
            number_of_shards: 1
            number_of_replicas: 0
            analysis:
              analyzer:
                my_analyzer:
                  tokenizer: standard
                  filter: [my_ngram]
              filter:
                my_ngram:
                  type: ngram
                  # NOTE(review): these were `min: 2,` / `max: 2` — wrong key
                  # names (silently ignored) plus a stray trailing comma.
                  # The correct ngram filter settings are min_gram/max_gram.
                  min_gram: 2
                  max_gram: 2
          mappings:
            properties:
              text:
                type: match_only_text
                analyzer: my_analyzer

  - do:
      index:
        index: test
        id: 1
        body: { "text": "foo bar baz" }
        refresh: true

  # "foa" shares the bigram "fo" with "foo", so the match query finds the doc.
  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              text:
                query: foa
  - match: {hits.total: 1}

---
"testNGramCopyField":
  - skip:
      version: " - 2.11.99"
      reason: "match_only_text was added in 2.12"
  - do:
      indices.create:
        index: test
        body:
          settings:
            number_of_shards: 1
            number_of_replicas: 0
            # required because max_gram - min_gram below exceeds the default 1
            max_ngram_diff: 9
            analysis:
              analyzer:
                my_ngram_analyzer:
                  tokenizer: my_ngram_tokenizer
              tokenizer:
                my_ngram_tokenizer:
                  type: ngram
                  # NOTE(review): these were `min: 1,` / `max: 10` — wrong key
                  # names (silently ignored) plus a stray trailing comma; the
                  # max_ngram_diff: 9 setting above only makes sense with the
                  # intended min_gram: 1 / max_gram: 10.
                  min_gram: 1
                  max_gram: 10
                  token_chars: []
          mappings:
            properties:
              origin:
                type: match_only_text
                copy_to: meta
              meta:
                type: match_only_text
                analyzer: my_ngram_analyzer

  - do:
      index:
        index: test
        id: 1
        body: { "origin": "C.A1234.5678" }
        refresh: true

  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              meta:
                query: 1234
  - match: {hits.total: 1}

  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              meta:
                query: 1234.56
  - match: {hits.total: 1}

  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              meta:
                query: A1234
  - match: {hits.total: 1}

  # term query bypasses analysis, so the lowercased literal finds nothing
  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            term:
              meta:
                value: a1234
  - match: {hits.total: 0}

  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              meta:
                query: A1234
                analyzer: my_ngram_analyzer
  - match: {hits.total: 1}

  # lowercase query still matches via the shared digit-only ngrams (e.g. "1234")
  - do:
      search:
        rest_total_hits_as_int: true
        body:
          query:
            match:
              meta:
                query: a1234
                analyzer: my_ngram_analyzer
  - match: {hits.total: 1}
Oops, something went wrong.