allow using variables inside complex variables
andrewnester committed Jun 24, 2024
1 parent 443b338 commit ddd14eb
Showing 5 changed files with 22 additions and 5 deletions.
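
In effect, a bundle's databricks.yml can now reference a simple variable from inside a complex variable's value, and the new ResolveVariableReferencesInComplexVariables mutator resolves that inner reference before the complex variable itself is substituted. A minimal illustrative sketch of the pattern (names mirror the test fixture changed in this commit; this snippet is a sketch, not a file from the repository):

```yaml
variables:
  node_type:
    default: "Standard_DS3_v2"
  cluster:
    type: complex
    description: "A cluster definition"
    default:
      spark_version: "13.2.x-scala2.11"
      # A reference to a simple variable inside a complex variable's value.
      node_type_id: ${var.node_type}
      num_workers: 2

resources:
  jobs:
    my_job:
      job_clusters:
        - job_cluster_key: key
          # The fully resolved complex variable is substituted here afterwards.
          new_cluster: ${var.cluster}
```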
8 changes: 8 additions & 0 deletions bundle/config/mutator/resolve_variable_references.go
@@ -31,6 +31,14 @@ func ResolveVariableReferencesInLookup() bundle.Mutator {
 	}, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("lookup")), lookupFn: lookupForVariables}
 }
 
+func ResolveVariableReferencesInComplexVariables() bundle.Mutator {
+	return &resolveVariableReferences{prefixes: []string{
+		"bundle",
+		"workspace",
+		"variables",
+	}, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")), lookupFn: lookupForVariables}
+}
+
 func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) {
 	// Future opportunity: if we lookup this path in both the given root
 	// and the synthesized root, we know if it was explicitly set or implied to be empty.
6 changes: 4 additions & 2 deletions bundle/phases/initialize.go
@@ -29,10 +29,12 @@ func Initialize() bundle.Mutator {
 			mutator.ExpandWorkspaceRoot(),
 			mutator.DefineDefaultWorkspacePaths(),
 			mutator.SetVariables(),
-			// Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences
-			// and ResolveVariableReferences. See what is expected in PythonMutatorPhaseInit doc
+			// Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences,
+			// ResolveVariableReferencesInComplexVariables and ResolveVariableReferences.
+			// See what is expected in PythonMutatorPhaseInit doc
 			pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit),
 			mutator.ResolveVariableReferencesInLookup(),
+			mutator.ResolveVariableReferencesInComplexVariables(),
 			mutator.ResolveResourceReferences(),
 			mutator.ResolveVariableReferences(
 				"bundle",
4 changes: 4 additions & 0 deletions bundle/tests/complex_variables_test.go
@@ -16,13 +16,15 @@ func TestComplexVariables(t *testing.T) {
 
 	diags = bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SetVariables(),
+		mutator.ResolveVariableReferencesInComplexVariables(),
 		mutator.ResolveVariableReferences(
 			"variables",
 		),
 	))
 	require.NoError(t, diags.Error())
 
 	require.Equal(t, "13.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
+	require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
 	require.Equal(t, 2, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
 	require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
 
@@ -44,13 +46,15 @@ func TestComplexVariablesOverride(t *testing.T) {
 
 	diags = bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SetVariables(),
+		mutator.ResolveVariableReferencesInComplexVariables(),
 		mutator.ResolveVariableReferences(
 			"variables",
 		),
 	))
 	require.NoError(t, diags.Error())
 
 	require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
+	require.Equal(t, "Standard_DS3_v3", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
 	require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
 	require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
 }
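
For reference, the override test above asserts that with the dev target applied, the complex variable resolves to the following effective cluster settings. This is an illustrative rendering of the asserted values, not a file from the repository:

```yaml
new_cluster:
  spark_version: "14.2.x-scala2.11"
  node_type_id: "Standard_DS3_v3" # via the dev override of var.node_type
  num_workers: 4
  spark_conf:
    spark.speculation: "false"
```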
7 changes: 5 additions & 2 deletions bundle/tests/variables/complex/databricks.yml
@@ -13,12 +13,14 @@ resources:
           libraries: ${var.libraries}
 
 variables:
+  node_type:
+    default: "Standard_DS3_v2"
   cluster:
     type: complex
     description: "A cluster definition"
     default:
       spark_version: "13.2.x-scala2.11"
-      node_type_id: "Standard_DS3_v2"
+      node_type_id: ${var.node_type}
       num_workers: 2
       spark_conf:
         spark.speculation: true
@@ -36,9 +38,10 @@ targets:
   default:
   dev:
     variables:
+      node_type: "Standard_DS3_v3"
       cluster:
         spark_version: "14.2.x-scala2.11"
-        node_type_id: "Standard_DS3_v2"
+        node_type_id: ${var.node_type}
         num_workers: 4
         spark_conf:
           spark.speculation: false
2 changes: 1 addition & 1 deletion libs/dyn/convert/from_typed.go
@@ -111,7 +111,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptions
 
 		var options []fromTypedOptions
 		if v.Kind() == reflect.Interface {
-			options = append(options, includeZeroValuedScalars)
+			options = append(options, includeZeroValues)
 		}
 
 		// Convert the field taking into account the reference value (may be equal to config.NilValue).
