# Add resource for UC schemas to DABs (#1413)
## Changes

This PR adds support for UC schemas to DABs. It allows users to define schemas for tables and other assets their pipelines/workflows create as part of the DAB, thus managing their lifecycle in the DAB. The first version has a couple of intentional limitations:

1. The owner of the schema is the deployment user. Changing the owner of the schema is not allowed (yet). `run_as` will not be restricted for DABs containing UC schemas; its scope is limited to the compute identity used, rather than ownership of data assets like UC schemas.
2. API fields that are present in the update API but not in the create API are not supported. For example, enabling predictive optimization is not supported in the create schema API and is thus not available in DABs at the moment.

## Tests

Manual and integration tests. Manually verified that the following work:

1. Development mode adds a "dev_" prefix to the schema name.
2. Modified status is correctly computed in the `bundle summary` command.
3. Grants work as expected for assigning privileges.
4. Variable interpolation works for the schema ID.
1 parent 5afcc25 · commit 89c0af5 · Showing 20 changed files with 540 additions and 13 deletions.
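For orientation (not part of the diff): a minimal sketch of how the new `resources.Schema` type added below composes the create-API fields with bundle-level grants. The catalog, schema, and principal names are made up, and the snippet assumes the custom `MarshalJSON` shown below flattens the embedded `catalog.CreateSchema` fields into the top-level object, as it does for other DAB resources.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/catalog"
)

func main() {
	// Hypothetical schema definition: the embedded catalog.CreateSchema carries
	// the create-API fields, while Grants is specific to the bundle resource.
	s := resources.Schema{
		CreateSchema: &catalog.CreateSchema{
			Name:        "sandbox",
			CatalogName: "main",
			Comment:     "managed by the bundle",
		},
		Grants: []resources.Grant{
			{Principal: "[email protected]", Privileges: []string{"USE_SCHEMA", "CREATE_TABLE"}},
		},
	}

	// Serialize through the custom MarshalJSON defined below; the read-only
	// "id" field is omitted because it is only known after deployment.
	b, err := json.MarshalIndent(s, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```

The `id` field is only filled in from the Terraform state after deployment, and in development mode the deployed schema name additionally gets the "dev_" prefix mentioned above.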
@@ -0,0 +1,27 @@
```go
package resources

import (
	"github.com/databricks/databricks-sdk-go/marshal"
	"github.com/databricks/databricks-sdk-go/service/catalog"
)

type Schema struct {
	// List of grants to apply on this schema.
	Grants []Grant `json:"grants,omitempty"`

	// Full name of the schema (catalog_name.schema_name). This value is read from
	// the terraform state after deployment succeeds.
	ID string `json:"id,omitempty" bundle:"readonly"`

	*catalog.CreateSchema

	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
}

func (s *Schema) UnmarshalJSON(b []byte) error {
	return marshal.Unmarshal(b, s)
}

func (s Schema) MarshalJSON() ([]byte, error) {
	return marshal.Marshal(s)
}
```
@@ -0,0 +1,53 @@
```go
package tfdyn

import (
	"context"
	"fmt"

	"github.com/databricks/cli/bundle/internal/tf/schema"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/cli/libs/log"
)

func convertSchemaResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	// Normalize the output value to the target schema.
	v, diags := convert.Normalize(schema.ResourceSchema{}, vin)
	for _, diag := range diags {
		log.Debugf(ctx, "schema normalization diagnostic: %s", diag.Summary)
	}

	// We always set force destroy as it allows DABs to manage the lifecycle
	// of the schema. It's the responsibility of the CLI to ensure the user
	// is adequately warned when they try to delete a UC schema.
	vout, err := dyn.SetByPath(v, dyn.MustPathFromString("force_destroy"), dyn.V(true))
	if err != nil {
		return dyn.InvalidValue, err
	}

	return vout, nil
}

type schemaConverter struct{}

func (schemaConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	vout, err := convertSchemaResource(ctx, vin)
	if err != nil {
		return err
	}

	// Add the converted resource to the output.
	out.Schema[key] = vout.AsAny()

	// Configure grants for this resource.
	if grants := convertGrantsResource(ctx, vin); grants != nil {
		grants.Schema = fmt.Sprintf("${databricks_schema.%s.id}", key)
		out.Grants["schema_"+key] = grants
	}

	return nil
}

func init() {
	registerConverter("schemas", schemaConverter{})
}
```
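Not part of the diff: a minimal sketch, assuming it sits in the same `tfdyn` package (the function is unexported), showing that the converter injects `force_destroy: true` even when the input does not set it. The test file that follows exercises the same path end to end through `schemaConverter.Convert`.

```go
package tfdyn

import (
	"context"
	"fmt"

	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/databricks-sdk-go/service/catalog"
)

// exampleConvertSchema is a hypothetical helper, not part of the commit.
func exampleConvertSchema() error {
	// Minimal input: only a name and catalog; the user never sets force_destroy.
	src := resources.Schema{
		CreateSchema: &catalog.CreateSchema{
			Name:        "sandbox", // made-up schema name
			CatalogName: "main",    // made-up catalog name
		},
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	if err != nil {
		return err
	}

	vout, err := convertSchemaResource(context.Background(), vin)
	if err != nil {
		return err
	}

	// Prints a map with "name", "catalog_name", and the injected
	// "force_destroy": true.
	fmt.Println(vout.AsAny())
	return nil
}
```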
@@ -0,0 +1,75 @@
```go
package tfdyn

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/tf/schema"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/databricks-sdk-go/service/catalog"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestConvertSchema(t *testing.T) {
	var src = resources.Schema{
		CreateSchema: &catalog.CreateSchema{
			Name:        "name",
			CatalogName: "catalog",
			Comment:     "comment",
			Properties: map[string]string{
				"k1": "v1",
				"k2": "v2",
			},
			StorageRoot: "root",
		},
		Grants: []resources.Grant{
			{
				Privileges: []string{"EXECUTE"},
				Principal:  "[email protected]",
			},
			{
				Privileges: []string{"RUN"},
				Principal:  "[email protected]",
			},
		},
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = schemaConverter{}.Convert(ctx, "my_schema", vin, out)
	require.NoError(t, err)

	// Assert equality on the schema
	assert.Equal(t, map[string]any{
		"name":         "name",
		"catalog_name": "catalog",
		"comment":      "comment",
		"properties": map[string]any{
			"k1": "v1",
			"k2": "v2",
		},
		"force_destroy": true,
		"storage_root":  "root",
	}, out.Schema["my_schema"])

	// Assert equality on the grants
	assert.Equal(t, &schema.ResourceGrants{
		Schema: "${databricks_schema.my_schema.id}",
		Grant: []schema.ResourceGrantsGrant{
			{
				Privileges: []string{"EXECUTE"},
				Principal:  "[email protected]",
			},
			{
				Privileges: []string{"RUN"},
				Principal:  "[email protected]",
			},
		},
	}, out.Grants["schema_my_schema"])
}
```