Fix import of index analysis objects (#225)
Previously, when importing an existing index, the analysis configuration
(analyzers, tokenizers, filters, char_filters, and normalizers) was not
fully populated into the Terraform state. As a result, users would not see
these analysis settings after import, leading to missing or incomplete
configurations in state.

This commit introduces logic to reconstruct nested analysis objects from
the flattened `index.analysis.*` keys returned by OpenSearch on import. By
converting these flattened keys back into a nested JSON structure, the
imported index state now includes the analysis settings as users typically
define them in their Terraform configuration.
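For illustration, here is a minimal, self-contained sketch of that conversion (the settings keys and values are hypothetical examples, not taken from this commit):

```go
// Sketch of the flattened-to-nested conversion performed on read/import.
// insertIntoNestedMap mirrors the helper added in this commit; the settings
// values below are hypothetical.
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func insertIntoNestedMap(m map[string]interface{}, keys []string, value interface{}) {
	if len(keys) == 1 {
		m[keys[0]] = value
		return
	}
	if _, ok := m[keys[0]].(map[string]interface{}); !ok {
		m[keys[0]] = map[string]interface{}{}
	}
	insertIntoNestedMap(m[keys[0]].(map[string]interface{}), keys[1:], value)
}

func main() {
	// Flattened keys as they appear in the settings map for an imported index.
	settings := map[string]interface{}{
		"index.analysis.tokenizer.my_ngram_tokenizer.type":     "ngram",
		"index.analysis.tokenizer.my_ngram_tokenizer.min_gram": "3",
		"index.analysis.tokenizer.my_ngram_tokenizer.max_gram": "4",
	}

	tokenizers := map[string]interface{}{}
	for key, value := range settings {
		rest := strings.TrimPrefix(key, "index.analysis.tokenizer.")
		insertIntoNestedMap(tokenizers, strings.Split(rest, "."), value)
	}

	out, _ := json.Marshal(tokenizers)
	fmt.Println(string(out))
	// Prints: {"my_ngram_tokenizer":{"max_gram":"4","min_gram":"3","type":"ngram"}}
}
```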

**Note**: This change may reveal differences for existing configurations if
they rely on unquoted numeric values or trailing whitespace in
analysis-related JSON. Such configurations may now produce diffs where they
did not before, potentially resulting in forced replacements.
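To make that concrete, a minimal sketch of how a quoting mismatch surfaces as a diff (illustrative values only):

```go
// Illustrative only: if the JSON rebuilt from the imported settings quotes a
// numeric value while the configuration's jsonencode leaves it unquoted, the
// two strings are no longer equal and Terraform reports a change on the
// corresponding analysis_* attribute.
package main

import "fmt"

func main() {
	fromState := `{"my_tokenizer":{"max_gram":"4","type":"ngram"}}` // reconstructed on read/import
	fromConfig := `{"my_tokenizer":{"max_gram":4,"type":"ngram"}}`  // config using an unquoted number
	fmt.Println(fromState == fromConfig)                            // false -> plan shows a diff
}
```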

Signed-off-by: Gonzalo Arce <[email protected]>
gonz-cint committed Dec 11, 2024
1 parent 790834e commit d3e151f
Showing 2 changed files with 147 additions and 0 deletions.
60 changes: 60 additions & 0 deletions provider/resource_opensearch_index.go
@@ -802,6 +802,53 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error

indexResourceDataFromSettings(settings, d)

// Reconstruct analysis fields from flattened keys
analysisData := map[string]map[string]interface{}{
"analyzer": {},
"tokenizer": {},
"filter": {},
"char_filter": {},
"normalizer": {},
}

for key, value := range settings {
if strings.HasPrefix(key, "index.analysis.") {
parts := strings.Split(strings.TrimPrefix(key, "index.analysis."), ".")
if len(parts) < 2 {
continue
}

category := parts[0] // should be one of analyzer, tokenizer, filter, char_filter, normalizer
if _, ok := analysisData[category]; !ok {
continue
}

subkeys := parts[1:]
insertIntoNestedMap(analysisData[category], subkeys, value)
}
}

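// Only populate the analysis_* attributes for categories that actually have settings.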
if len(analysisData["analyzer"]) > 0 {
analyzerJSON, _ := json.Marshal(analysisData["analyzer"])
d.Set("analysis_analyzer", string(analyzerJSON))
}
if len(analysisData["tokenizer"]) > 0 {
tokenizerJSON, _ := json.Marshal(analysisData["tokenizer"])
d.Set("analysis_tokenizer", string(tokenizerJSON))
}
if len(analysisData["filter"]) > 0 {
filterJSON, _ := json.Marshal(analysisData["filter"])
d.Set("analysis_filter", string(filterJSON))
}
if len(analysisData["char_filter"]) > 0 {
charFilterJSON, _ := json.Marshal(analysisData["char_filter"])
d.Set("analysis_char_filter", string(charFilterJSON))
}
if len(analysisData["normalizer"]) > 0 {
normalizerJSON, _ := json.Marshal(analysisData["normalizer"])
d.Set("analysis_normalizer", string(normalizerJSON))
}

var response *json.RawMessage
var res *elastic7.Response
var mappingsResponse map[string]interface{}
@@ -846,6 +893,19 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error
return nil
}

// This is used to rebuild nested analysis configuration (analyzers, tokenizers, filters, char_filters, normalizers)
// from the flattened `index.analysis.*` keys returned by OpenSearch on import.
func insertIntoNestedMap(m map[string]interface{}, keys []string, value interface{}) {
if len(keys) == 1 {
m[keys[0]] = value
return
}
if _, ok := m[keys[0]].(map[string]interface{}); !ok {
m[keys[0]] = map[string]interface{}{}
}
insertIntoNestedMap(m[keys[0]].(map[string]interface{}), keys[1:], value)
}

func updateAliases(index string, oldAliases, newAliases map[string]interface{}, meta interface{}) error {
ctx := context.Background()

87 changes: 87 additions & 0 deletions provider/resource_opensearch_index_test.go
@@ -262,6 +262,52 @@ resource "opensearch_index" "test" {
depends_on = [opensearch_index_template.test]
}
`
testAccOpensearchIndexImportAnalysis = `
resource "opensearch_index" "test_import_analysis" {
name = "terraform-test-import-analysis"
number_of_shards = 1
number_of_replicas = 1
analysis_analyzer = jsonencode({
custom_analyzer = {
type = "custom"
tokenizer = "standard"
filter = ["lowercase", "asciifolding"]
}
})
analysis_filter = jsonencode({
my_shingle_filter = {
type = "shingle"
max_shingle_size = 2
min_shingle_size = 2
output_unigrams = false
}
})
analysis_tokenizer = jsonencode({
my_ngram_tokenizer = {
type = "ngram"
min_gram = "3"
max_gram = "4"
}
})
analysis_char_filter = jsonencode({
my_char_filter_apostrophe = {
type = "mapping"
mappings = ["'=>"]
}
})
analysis_normalizer = jsonencode({
my_normalizer = {
type = "custom"
filter = ["lowercase", "asciifolding"]
}
})
}
`
)

@@ -788,3 +834,44 @@ func checkOpensearchAliasDeleted(indexName, aliasName string) resource.TestCheck
return nil
}
}

func TestAccOpensearchIndex_importAnalysis(t *testing.T) {
resourceName := "opensearch_index.test_import_analysis"
indexName := "terraform-test-import-analysis"

resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: checkOpensearchIndexDestroy,
Steps: []resource.TestStep{
// Step 1: Create the index with analysis fields
{
Config: testAccOpensearchIndexImportAnalysis,
Check: resource.ComposeTestCheckFunc(
checkOpensearchIndexExists(resourceName),
),
},
// Step 2: Import the index
{
ResourceName: resourceName,
ImportState: true,
ImportStateId: indexName,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{
"force_destroy",
},
},
// Step 3: Re-run the same config and ensure no diffs appear
{
Config: testAccOpensearchIndexImportAnalysis,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "analysis_analyzer", `{"custom_analyzer":{"filter":["lowercase","asciifolding"],"tokenizer":"standard","type":"custom"}}`),
resource.TestCheckResourceAttr(resourceName, "analysis_filter", `{"my_shingle_filter":{"max_shingle_size":"2","min_shingle_size":"2","output_unigrams":false,"type":"shingle"}}`),
resource.TestCheckResourceAttr(resourceName, "analysis_tokenizer", `{"my_ngram_tokenizer":{"max_gram":"4","min_gram":"3","type":"ngram"}}`),
resource.TestCheckResourceAttr(resourceName, "analysis_char_filter", `{"my_char_filter_apostrophe":{"mappings":["'=>"],"type":"mapping"}}`),
resource.TestCheckResourceAttr(resourceName, "analysis_normalizer", `{"my_normalizer":{"filter":["lowercase","asciifolding"],"type":"custom"}}`),
),
},
},
})
}
