From 6922b384e633ba707454510b90d5032ead5a9120 Mon Sep 17 00:00:00 2001
From: Gonzalo Arce
Date: Mon, 9 Dec 2024 14:22:18 +0000
Subject: [PATCH] Fix import of index analysis objects (#225)

Previously, when importing an existing index, the analysis configuration
(analyzers, tokenizers, filters, char_filters, and normalizers) was not
fully populated into the Terraform state. As a result, users would not see
these analysis settings after import, leading to missing or incomplete
configurations in state.

This commit introduces logic to reconstruct nested analysis objects from the
flattened `index.analysis.*` keys returned by OpenSearch on import. By
converting these flattened keys back into a nested JSON structure, the
imported index state now includes the analysis settings as users typically
define them in their Terraform configuration.

**Note**: This change may reveal differences for existing configurations if
they rely on unquoted numeric values or trailing whitespace in
analysis-related JSON. Such configurations may now produce diffs where they
did not before, potentially resulting in forced replacements.

Signed-off-by: Gonzalo Arce
---
 provider/resource_opensearch_index.go | 61 ++++++++++++++++++++++++++-
 1 file changed, 60 insertions(+), 1 deletion(-)

diff --git a/provider/resource_opensearch_index.go b/provider/resource_opensearch_index.go
index 2c77e67..97702b7 100644
--- a/provider/resource_opensearch_index.go
+++ b/provider/resource_opensearch_index.go
@@ -802,6 +802,53 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error
 
 	indexResourceDataFromSettings(settings, d)
 
+	// Reconstruct analysis fields from the flattened `index.analysis.*` keys
+	analysisData := map[string]map[string]interface{}{
+		"analyzer":    {},
+		"tokenizer":   {},
+		"filter":      {},
+		"char_filter": {},
+		"normalizer":  {},
+	}
+
+	for key, value := range settings {
+		if strings.HasPrefix(key, "index.analysis.") {
+			parts := strings.Split(strings.TrimPrefix(key, "index.analysis."), ".")
+			if len(parts) < 2 {
+				continue
+			}
+
+			category := parts[0] // should be one of analyzer, tokenizer, filter, char_filter, normalizer
+			if _, ok := analysisData[category]; !ok {
+				continue
+			}
+
+			subkeys := parts[1:]
+			insertIntoNestedMap(analysisData[category], subkeys, value)
+		}
+	}
+
+	if len(analysisData["analyzer"]) > 0 {
+		analyzerJSON, _ := json.Marshal(analysisData["analyzer"])
+		d.Set("analysis_analyzer", string(analyzerJSON))
+	}
+	if len(analysisData["tokenizer"]) > 0 {
+		tokenizerJSON, _ := json.Marshal(analysisData["tokenizer"])
+		d.Set("analysis_tokenizer", string(tokenizerJSON))
+	}
+	if len(analysisData["filter"]) > 0 {
+		filterJSON, _ := json.Marshal(analysisData["filter"])
+		d.Set("analysis_filter", string(filterJSON))
+	}
+	if len(analysisData["char_filter"]) > 0 {
+		charFilterJSON, _ := json.Marshal(analysisData["char_filter"])
+		d.Set("analysis_char_filter", string(charFilterJSON))
+	}
+	if len(analysisData["normalizer"]) > 0 {
+		normalizerJSON, _ := json.Marshal(analysisData["normalizer"])
+		d.Set("analysis_normalizer", string(normalizerJSON))
+	}
+
 	var response *json.RawMessage
 	var res *elastic7.Response
 	var mappingsResponse map[string]interface{}
@@ -838,7 +885,6 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error
 	}
 
 	err = d.Set("mappings", string(jsonString))
-
 	if err != nil {
 		return err
 	}
@@ -846,6 +892,19 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error
 
 	return nil
 }
 
+// insertIntoNestedMap rebuilds nested analysis configuration (analyzers, tokenizers, filters, char_filters, normalizers)
+// from the flattened `index.analysis.*` keys returned by OpenSearch on import.
+func insertIntoNestedMap(m map[string]interface{}, keys []string, value interface{}) {
+	if len(keys) == 1 {
+		m[keys[0]] = value
+		return
+	}
+	if _, ok := m[keys[0]].(map[string]interface{}); !ok {
+		m[keys[0]] = map[string]interface{}{}
+	}
+	insertIntoNestedMap(m[keys[0]].(map[string]interface{}), keys[1:], value)
+}
+
 func updateAliases(index string, oldAliases, newAliases map[string]interface{}, meta interface{}) error {
 	ctx := context.Background()
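
Reviewer note (illustrative only, not part of the patch): the sketch below shows how the unflattening helper behaves on a few sample flattened settings keys. The settings keys, analyzer name, and values are made up for the example; only insertIntoNestedMap mirrors the helper added in this change.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Same shape as the helper added in this patch: walk the dotted key path,
// creating intermediate maps as needed, and set the value at the leaf.
func insertIntoNestedMap(m map[string]interface{}, keys []string, value interface{}) {
	if len(keys) == 1 {
		m[keys[0]] = value
		return
	}
	if _, ok := m[keys[0]].(map[string]interface{}); !ok {
		m[keys[0]] = map[string]interface{}{}
	}
	insertIntoNestedMap(m[keys[0]].(map[string]interface{}), keys[1:], value)
}

func main() {
	// Hypothetical flattened settings of the kind the read path iterates over.
	settings := map[string]interface{}{
		"index.analysis.analyzer.my_analyzer.type":      "custom",
		"index.analysis.analyzer.my_analyzer.tokenizer": "standard",
		"index.analysis.analyzer.my_analyzer.filter":    []interface{}{"lowercase"},
		"index.number_of_shards":                        "1", // not an analysis key, skipped below
	}

	analyzers := map[string]interface{}{}
	for key, value := range settings {
		if !strings.HasPrefix(key, "index.analysis.analyzer.") {
			continue
		}
		subkeys := strings.Split(strings.TrimPrefix(key, "index.analysis.analyzer."), ".")
		insertIntoNestedMap(analyzers, subkeys, value)
	}

	out, _ := json.Marshal(analyzers)
	fmt.Println(string(out))
	// {"my_analyzer":{"filter":["lowercase"],"tokenizer":"standard","type":"custom"}}
}
```

As in the patch, values keep whatever type the settings map holds (strings for scalar settings), which is why configurations relying on unquoted numerics or trailing whitespace in analysis JSON can surface diffs after import, as noted in the commit message.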