feat(index): extend opensearch_index resource capabilities (#39)
* Extend the opensearch_index resource's capabilities and change var.indices from map(any) to map(object({})) (see the usage sketch below)

* Bump the required Terraform version to >= 1.3, since optional object type attributes require it

* Add the missing search_slowlog_threshold_fetch_info attribute to var.indices

* fix: prevent replacing existing indices

---------

Co-authored-by: Steve Teuber <[email protected]>
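
To illustrate the new variable shape, here is a minimal caller-side sketch (not taken verbatim from the README): the module source and index name are placeholders, other required module arguments are omitted, and any attribute left unset falls back to `null` through `optional()`, which is why Terraform >= 1.3 is now required.

```hcl
# Hypothetical module call for illustration only; "source" and the index name
# are placeholders, and required arguments such as saml_metadata_content are
# omitted for brevity.
module "opensearch" {
  source = "./modules/opensearch" # assumption: replace with the real module address

  indices = {
    # Set only the attributes you need; every other optional() attribute
    # in the object type defaults to null.
    example-logs = {
      number_of_shards   = 2
      number_of_replicas = 1
      refresh_interval   = "30s"
    }
    # Under the old map(any) type a misspelled or unsupported key was passed
    # through silently; with map(object({...})) Terraform rejects attributes
    # that are not declared in the object type at plan time.
  }
}
```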
Amos-85 and steveteuber authored Dec 5, 2023
1 parent 0388438 commit 43c7233
Showing 4 changed files with 155 additions and 14 deletions.
37 changes: 33 additions & 4 deletions README.md
@@ -36,7 +36,7 @@ data "http" "saml_metadata" {
url = local.saml_metadata_url
}
provider "elasticsearch" {
provider "opensearch" {
url = module.opensearch.cluster_endpoint
aws_region = data.aws_region.current.name
healthcheck = false
@@ -54,10 +54,39 @@ module "opensearch" {
saml_metadata_content = data.http.saml_metadata.body
indices = {
example-index = {
example-index-1 = {
number_of_shards = 2
number_of_replicas = 1
}
example-index-2 = {
number_of_shards = 2
number_of_replicas = 1
mappings = {
"properties" : {
"id" : {
"type" : "text"
},
"name" : {
"type" : "text"
},
"containerType" : {
"type" : "text"
},
"containerIds" : {
"type" : "text"
},
"synonyms" : {
"type" : "text"
},
"parentEvents" : {
"type" : "text"
},
"valueType" : {
"type" : "text"
}
}
}
}
}
}
```
@@ -74,7 +74,7 @@ Here is a working example of using this Terraform module:

| Name | Version |
|------|---------|
| <a name="requirement_terraform"></a> [terraform](#requirement\_terraform) | >= 1.0.0 |
| <a name="requirement_terraform"></a> [terraform](#requirement\_terraform) | >= 1.3.0 |
| <a name="requirement_aws"></a> [aws](#requirement\_aws) | >= 4.12.0 |
| <a name="requirement_opensearch"></a> [opensearch](#requirement\_opensearch) | >= 2.0.0 |

@@ -144,7 +173,7 @@ Here is a working example of using this Terraform module:
| <a name="input_index_files"></a> [index\_files](#input\_index\_files) | A set of all index files to create. | `set(string)` | `[]` | no |
| <a name="input_index_template_files"></a> [index\_template\_files](#input\_index\_template\_files) | A set of all index template files to create. | `set(string)` | `[]` | no |
| <a name="input_index_templates"></a> [index\_templates](#input\_index\_templates) | A map of all index templates to create. | `map(any)` | `{}` | no |
| <a name="input_indices"></a> [indices](#input\_indices) | A map of all indices to create. | `map(any)` | `{}` | no |
| <a name="input_indices"></a> [indices](#input\_indices) | A map of all indices to create. | <pre>map(object({<br> number_of_shards = optional(number)<br> number_of_replicas = optional(number)<br> refresh_interval = optional(string)<br> mappings = optional(any, {})<br> aliases = optional(any, {})<br> analysis_analyzer = optional(string)<br> analysis_char_filter = optional(string)<br> analysis_filter = optional(string)<br> analysis_normalizer = optional(string)<br> analysis_tokenizer = optional(string)<br> analyze_max_token_count = optional(string)<br> auto_expand_replicas = optional(string)<br> blocks_metadata = optional(bool)<br> blocks_read = optional(bool)<br> blocks_read_only = optional(bool)<br> blocks_read_only_allow_delete = optional(bool)<br> blocks_write = optional(bool)<br> codec = optional(string)<br> default_pipeline = optional(string)<br> gc_deletes = optional(string)<br> highlight_max_analyzed_offset = optional(string)<br> include_type_name = optional(string)<br> index_similarity_default = optional(string)<br> indexing_slowlog_level = optional(string)<br> indexing_slowlog_source = optional(string)<br> indexing_slowlog_threshold_index_debug = optional(string)<br> indexing_slowlog_threshold_index_info = optional(string)<br> indexing_slowlog_threshold_index_trace = optional(string)<br> indexing_slowlog_threshold_index_warn = optional(string)<br> load_fixed_bitset_filters_eagerly = optional(bool)<br> max_docvalue_fields_search = optional(string)<br> max_inner_result_window = optional(string)<br> max_ngram_diff = optional(string)<br> max_refresh_listeners = optional(string)<br> max_regex_length = optional(string)<br> max_rescore_window = optional(string)<br> max_result_window = optional(string)<br> max_script_fields = optional(string)<br> max_shingle_diff = optional(string)<br> max_terms_count = optional(string)<br> number_of_routing_shards = optional(string)<br> rollover_alias = optional(string)<br> routing_allocation_enable = optional(string)<br> routing_partition_size = optional(string)<br> routing_rebalance_enable = optional(string)<br> search_idle_after = optional(string)<br> search_slowlog_level = optional(string)<br> search_slowlog_threshold_fetch_info = optional(string)<br> search_slowlog_threshold_fetch_debug = optional(string)<br> search_slowlog_threshold_fetch_trace = optional(string)<br> search_slowlog_threshold_fetch_warn = optional(string)<br> search_slowlog_threshold_query_debug = optional(string)<br> search_slowlog_threshold_query_info = optional(string)<br> search_slowlog_threshold_query_trace = optional(string)<br> search_slowlog_threshold_query_warn = optional(string)<br> shard_check_on_startup = optional(string)<br> sort_field = optional(string)<br> sort_order = optional(string)<br> }))</pre> | `{}` | no |
| <a name="input_ism_policies"></a> [ism\_policies](#input\_ism\_policies) | A map of all ISM policies to create. | `map(any)` | `{}` | no |
| <a name="input_ism_policy_files"></a> [ism\_policy\_files](#input\_ism\_policy\_files) | A set of all ISM policy files to create. | `set(string)` | `[]` | no |
| <a name="input_log_streams_enabled"></a> [log\_streams\_enabled](#input\_log\_streams\_enabled) | Configuration for which log streams to enable sending logs to CloudWatch. | `map(string)` | <pre>{<br> "AUDIT_LOGS": "false",<br> "ES_APPLICATION_LOGS": "false",<br> "INDEX_SLOW_LOGS": "false",<br> "SEARCH_SLOW_LOGS": "false"<br>}</pre> | no |
67 changes: 60 additions & 7 deletions index.tf
@@ -1,13 +1,66 @@
resource "opensearch_index" "index" {
for_each = local.indices

name = each.key
number_of_shards = try(each.value.number_of_shards, "")
number_of_replicas = try(each.value.number_of_replicas, "")
refresh_interval = try(each.value.refresh_interval, "")
mappings = jsonencode(try(each.value.mappings, {}))
aliases = jsonencode(try(each.value.aliases, {}))
force_destroy = true
name = each.key
number_of_shards = try(each.value.number_of_shards, "")
number_of_replicas = try(each.value.number_of_replicas, "")
refresh_interval = try(each.value.refresh_interval, "")
mappings = jsonencode(try(each.value.mappings, {}))
aliases = jsonencode(try(each.value.aliases, {}))
analysis_analyzer = try(each.value.analysis_analyzer, null)
analysis_char_filter = try(each.value.analysis_char_filter, null)
analysis_filter = try(each.value.analysis_filter, null)
analysis_normalizer = try(each.value.analysis_normalizer, null)
analysis_tokenizer = try(each.value.analysis_tokenizer, null)
analyze_max_token_count = try(each.value.analyze_max_token_count, null)
auto_expand_replicas = try(each.value.auto_expand_replicas, null)
blocks_metadata = try(each.value.blocks_metadata, null)
blocks_read = try(each.value.blocks_read, null)
blocks_read_only = try(each.value.blocks_read_only, null)
blocks_read_only_allow_delete = try(each.value.blocks_read_only_allow_delete, null)
blocks_write = try(each.value.blocks_write, null)
codec = try(each.value.codec, null)
default_pipeline = try(each.value.default_pipeline, null)
gc_deletes = try(each.value.gc_deletes, null)
highlight_max_analyzed_offset = try(each.value.highlight_max_analyzed_offset, null)
include_type_name = try(each.value.include_type_name, null)
index_similarity_default = try(each.value.index_similarity_default, null)
indexing_slowlog_level = try(each.value.indexing_slowlog_level, null)
indexing_slowlog_source = try(each.value.indexing_slowlog_source, null)
indexing_slowlog_threshold_index_debug = try(each.value.indexing_slowlog_threshold_index_debug, null)
indexing_slowlog_threshold_index_info = try(each.value.indexing_slowlog_threshold_index_info, null)
indexing_slowlog_threshold_index_trace = try(each.value.indexing_slowlog_threshold_index_trace, null)
indexing_slowlog_threshold_index_warn = try(each.value.indexing_slowlog_threshold_index_warn, null)
load_fixed_bitset_filters_eagerly = try(each.value.load_fixed_bitset_filters_eagerly, null)
max_docvalue_fields_search = try(each.value.max_docvalue_fields_search, null)
max_inner_result_window = try(each.value.max_inner_result_window, null)
max_ngram_diff = try(each.value.max_ngram_diff, null)
max_refresh_listeners = try(each.value.max_refresh_listeners, null)
max_regex_length = try(each.value.max_regex_length, null)
max_rescore_window = try(each.value.max_rescore_window, null)
max_result_window = try(each.value.max_result_window, null)
max_script_fields = try(each.value.max_script_fields, null)
max_shingle_diff = try(each.value.max_shingle_diff, null)
max_terms_count = try(each.value.max_terms_count, null)
number_of_routing_shards = try(each.value.number_of_routing_shards, null)
rollover_alias = try(each.value.rollover_alias, null)
routing_allocation_enable = try(each.value.routing_allocation_enable, null)
routing_partition_size = try(each.value.routing_partition_size, null)
routing_rebalance_enable = try(each.value.routing_rebalance_enable, null)
search_idle_after = try(each.value.search_idle_after, null)
search_slowlog_level = try(each.value.search_slowlog_level, null)
search_slowlog_threshold_fetch_debug = try(each.value.search_slowlog_threshold_fetch_debug, null)
search_slowlog_threshold_fetch_info = try(each.value.search_slowlog_threshold_fetch_info, null)
search_slowlog_threshold_fetch_trace = try(each.value.search_slowlog_threshold_fetch_trace, null)
search_slowlog_threshold_fetch_warn = try(each.value.search_slowlog_threshold_fetch_warn, null)
search_slowlog_threshold_query_debug = try(each.value.search_slowlog_threshold_query_debug, null)
search_slowlog_threshold_query_info = try(each.value.search_slowlog_threshold_query_info, null)
search_slowlog_threshold_query_trace = try(each.value.search_slowlog_threshold_query_trace, null)
search_slowlog_threshold_query_warn = try(each.value.search_slowlog_threshold_query_warn, null)
shard_check_on_startup = try(each.value.shard_check_on_startup, null)
sort_field = try(each.value.sort_field, null)
sort_order = try(each.value.sort_order, null)
force_destroy = true

depends_on = [
opensearch_index_template.index_template,
63 changes: 61 additions & 2 deletions variables.tf
@@ -237,8 +237,67 @@ variable "ism_policy_files" {

variable "indices" {
description = "A map of all indices to create."
type = map(any)
default = {}
type = map(object({
number_of_shards = optional(number)
number_of_replicas = optional(number)
refresh_interval = optional(string)
mappings = optional(any, {})
aliases = optional(any, {})
analysis_analyzer = optional(string)
analysis_char_filter = optional(string)
analysis_filter = optional(string)
analysis_normalizer = optional(string)
analysis_tokenizer = optional(string)
analyze_max_token_count = optional(string)
auto_expand_replicas = optional(string)
blocks_metadata = optional(bool)
blocks_read = optional(bool)
blocks_read_only = optional(bool)
blocks_read_only_allow_delete = optional(bool)
blocks_write = optional(bool)
codec = optional(string)
default_pipeline = optional(string)
gc_deletes = optional(string)
highlight_max_analyzed_offset = optional(string)
include_type_name = optional(string)
index_similarity_default = optional(string)
indexing_slowlog_level = optional(string)
indexing_slowlog_source = optional(string)
indexing_slowlog_threshold_index_debug = optional(string)
indexing_slowlog_threshold_index_info = optional(string)
indexing_slowlog_threshold_index_trace = optional(string)
indexing_slowlog_threshold_index_warn = optional(string)
load_fixed_bitset_filters_eagerly = optional(bool)
max_docvalue_fields_search = optional(string)
max_inner_result_window = optional(string)
max_ngram_diff = optional(string)
max_refresh_listeners = optional(string)
max_regex_length = optional(string)
max_rescore_window = optional(string)
max_result_window = optional(string)
max_script_fields = optional(string)
max_shingle_diff = optional(string)
max_terms_count = optional(string)
number_of_routing_shards = optional(string)
rollover_alias = optional(string)
routing_allocation_enable = optional(string)
routing_partition_size = optional(string)
routing_rebalance_enable = optional(string)
search_idle_after = optional(string)
search_slowlog_level = optional(string)
search_slowlog_threshold_fetch_info = optional(string)
search_slowlog_threshold_fetch_debug = optional(string)
search_slowlog_threshold_fetch_trace = optional(string)
search_slowlog_threshold_fetch_warn = optional(string)
search_slowlog_threshold_query_debug = optional(string)
search_slowlog_threshold_query_info = optional(string)
search_slowlog_threshold_query_trace = optional(string)
search_slowlog_threshold_query_warn = optional(string)
shard_check_on_startup = optional(string)
sort_field = optional(string)
sort_order = optional(string)
}))
default = {}
}

variable "index_files" {
2 changes: 1 addition & 1 deletion versions.tf
@@ -1,5 +1,5 @@
terraform {
required_version = ">= 1.0.0"
required_version = ">= 1.3.0"

required_providers {
aws = {
