From ddd14eb4595fa1aa51e164bc84aaf9160185e909 Mon Sep 17 00:00:00 2001
From: Andrew Nester
Date: Mon, 24 Jun 2024 12:29:49 +0200
Subject: [PATCH] allow using variables inside complex variables

---
 bundle/config/mutator/resolve_variable_references.go | 8 ++++++++
 bundle/phases/initialize.go                          | 6 ++++--
 bundle/tests/complex_variables_test.go               | 4 ++++
 bundle/tests/variables/complex/databricks.yml        | 7 +++++--
 libs/dyn/convert/from_typed.go                       | 2 +-
 5 files changed, 22 insertions(+), 5 deletions(-)

diff --git a/bundle/config/mutator/resolve_variable_references.go b/bundle/config/mutator/resolve_variable_references.go
index f7fce6c821..218d5a65c6 100644
--- a/bundle/config/mutator/resolve_variable_references.go
+++ b/bundle/config/mutator/resolve_variable_references.go
@@ -31,6 +31,14 @@ func ResolveVariableReferencesInLookup() bundle.Mutator {
     }, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("lookup")), lookupFn: lookupForVariables}
 }
 
+func ResolveVariableReferencesInComplexVariables() bundle.Mutator {
+    return &resolveVariableReferences{prefixes: []string{
+        "bundle",
+        "workspace",
+        "variables",
+    }, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")), lookupFn: lookupForVariables}
+}
+
 func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) {
     // Future opportunity: if we lookup this path in both the given root
     // and the synthesized root, we know if it was explicitly set or implied to be empty.
diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go
index d96ee0ebf9..e24ce74e6b 100644
--- a/bundle/phases/initialize.go
+++ b/bundle/phases/initialize.go
@@ -29,10 +29,12 @@ func Initialize() bundle.Mutator {
             mutator.ExpandWorkspaceRoot(),
             mutator.DefineDefaultWorkspacePaths(),
             mutator.SetVariables(),
-            // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences
-            // and ResolveVariableReferences. See what is expected in PythonMutatorPhaseInit doc
+            // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences,
+            // ResolveVariableReferencesInComplexVariables and ResolveVariableReferences.
+            // See what is expected in PythonMutatorPhaseInit doc
             pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit),
             mutator.ResolveVariableReferencesInLookup(),
+            mutator.ResolveVariableReferencesInComplexVariables(),
             mutator.ResolveResourceReferences(),
             mutator.ResolveVariableReferences(
                 "bundle",
diff --git a/bundle/tests/complex_variables_test.go b/bundle/tests/complex_variables_test.go
index e6e78a8b41..57724d3752 100644
--- a/bundle/tests/complex_variables_test.go
+++ b/bundle/tests/complex_variables_test.go
@@ -16,6 +16,7 @@ func TestComplexVariables(t *testing.T) {
 
     diags = bundle.Apply(context.Background(), b, bundle.Seq(
         mutator.SetVariables(),
+        mutator.ResolveVariableReferencesInComplexVariables(),
         mutator.ResolveVariableReferences(
             "variables",
         ),
@@ -23,6 +24,7 @@ func TestComplexVariables(t *testing.T) {
     require.NoError(t, diags.Error())
 
     require.Equal(t, "13.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
+    require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
     require.Equal(t, 2, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
     require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
 
@@ -44,6 +46,7 @@ func TestComplexVariablesOverride(t *testing.T) {
 
     diags = bundle.Apply(context.Background(), b, bundle.Seq(
         mutator.SetVariables(),
+        mutator.ResolveVariableReferencesInComplexVariables(),
         mutator.ResolveVariableReferences(
             "variables",
         ),
@@ -51,6 +54,7 @@ func TestComplexVariablesOverride(t *testing.T) {
     require.NoError(t, diags.Error())
 
     require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
+    require.Equal(t, "Standard_DS3_v3", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
     require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
     require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
 }
diff --git a/bundle/tests/variables/complex/databricks.yml b/bundle/tests/variables/complex/databricks.yml
index ecc4ecd94d..1a3bdb5c5d 100644
--- a/bundle/tests/variables/complex/databricks.yml
+++ b/bundle/tests/variables/complex/databricks.yml
@@ -13,12 +13,14 @@ resources:
           libraries: ${var.libraries}
 
 variables:
+  node_type:
+    default: "Standard_DS3_v2"
   cluster:
     type: complex
     description: "A cluster definition"
     default:
       spark_version: "13.2.x-scala2.11"
-      node_type_id: "Standard_DS3_v2"
+      node_type_id: ${var.node_type}
       num_workers: 2
       spark_conf:
         spark.speculation: true
@@ -36,9 +38,10 @@ targets:
   default:
   dev:
     variables:
+      node_type: "Standard_DS3_v3"
       cluster:
         spark_version: "14.2.x-scala2.11"
-        node_type_id: "Standard_DS3_v2"
+        node_type_id: ${var.node_type}
         num_workers: 4
         spark_conf:
           spark.speculation: false
diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go
index b561bc0b04..b772571f96 100644
--- a/libs/dyn/convert/from_typed.go
+++ b/libs/dyn/convert/from_typed.go
@@ -111,7 +111,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio
 
         var options []fromTypedOptions
         if v.Kind() == reflect.Interface {
-            options = append(options, includeZeroValuedScalars)
+            options = append(options, includeZeroValues)
         }
 
         // Convert the field taking into account the reference value (may be equal to config.NilValue).
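
Illustrative usage, not part of the diff above: the new ResolveVariableReferencesInComplexVariables mutator reuses the existing resolveVariableReferences machinery but points its pattern at variables.*.value instead of variables.*.lookup, so a complex variable's value can itself reference other variables. The sketch below, modeled on the test fixture in this patch, shows the pattern; the bundle and job names are hypothetical. Because SetVariables runs before the new mutator in Initialize, a per-target override of the simple variable is expected to be reflected in the resolved complex value.

    # databricks.yml -- illustrative sketch only
    bundle:
      name: example-bundle                 # hypothetical name

    variables:
      node_type:
        description: "Node type shared by complex variables"
        default: "Standard_DS3_v2"
      cluster:
        type: complex
        description: "Cluster definition referencing another variable"
        default:
          spark_version: "13.2.x-scala2.11"
          node_type_id: ${var.node_type}   # resolved by ResolveVariableReferencesInComplexVariables
          num_workers: 2

    resources:
      jobs:
        my_job:
          job_clusters:
            - job_cluster_key: key
              new_cluster: ${var.cluster}

    targets:
      dev:
        variables:
          node_type: "Standard_DS3_v3"     # override expected to propagate into var.cluster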