From 27bd46db0d17fc6da14582c32e2a89fef50569c3 Mon Sep 17 00:00:00 2001
From: dgomez04
Date: Wed, 30 Oct 2024 18:15:21 -0600
Subject: [PATCH 01/10] started working on function resource.

---
 .../resources/catalog/resource_function.go | 45 +++++++++++++++++++
 1 file changed, 45 insertions(+)
 create mode 100644 internal/providers/pluginfw/resources/catalog/resource_function.go

diff --git a/internal/providers/pluginfw/resources/catalog/resource_function.go b/internal/providers/pluginfw/resources/catalog/resource_function.go
new file mode 100644
index 000000000..b30e868aa
--- /dev/null
+++ b/internal/providers/pluginfw/resources/catalog/resource_function.go
@@ -0,0 +1,45 @@
+package catalog
+
+import (
+	"context"
+
+	"github.com/databricks/terraform-provider-databricks/common"
+	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
+	"github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+const resourceName = "function"
+
+var _ resource.ResourceWithConfigure = &FunctionResource{}
+
+func ResourceFunction() resource.Resource {
+	return &FunctionResource{}
+}
+
+type FunctionResource struct {
+	Client *common.DatabricksClient
+}
+
+func (r *FunctionResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
+}
+
+// TODO: Update as needed to fit the requirements of the resource.
+func (r *FunctionResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+	attrs, blocks := tfschema.ResourceStructToSchemaMap(catalog_tf.FunctionInfo{}, nil)
+
+	resp.Schema = schema.Schema{
+		Description: "Terraform schema for Databricks Functions",
+		Attributes:  attrs,
+		Blocks:      blocks,
+	}
+}
+
+func (r *FunctionResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+	if r.Client == nil && req.ProviderData != nil {
+		r.Client = pluginfwcommon.ConfigureResource(req, resp)
+	}
+}

From f94dfd885f9206bb021d3097b91786a8d2b29900 Mon Sep 17 00:00:00 2001
From: dgomez04
Date: Thu, 31 Oct 2024 16:43:25 -0600
Subject: [PATCH 02/10] started working on CRUD operations for functions resource.
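Before the CRUD work starts, note that the PATCH 01 skeleton above only becomes reachable from Terraform once its constructor is handed to the plugin-framework provider; PATCH 07 below does exactly that by appending `catalog.ResourceFunction` to `pluginFwOnlyResources` in `pluginfw_rollout_utils.go`. A rough sketch of that wiring, with an illustrative provider type standing in for the real one:

```go
// Sketch only: how a plugin-framework resource constructor is surfaced to
// Terraform. The provider type below is a simplified stand-in; in this
// repository the registration lives in pluginfw_rollout_utils.go (PATCH 07).
package pluginfw

import (
	"context"

	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/catalog"
	"github.com/hashicorp/terraform-plugin-framework/resource"
)

type exampleProvider struct{}

// Resources lists every resource constructor; Terraform calls each one to
// build the resource, then invokes its Metadata, Schema and Configure methods.
func (p *exampleProvider) Resources(ctx context.Context) []func() resource.Resource {
	return []func() resource.Resource{
		catalog.ResourceFunction,
	}
}
```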
---
 .../resources/catalog/resource_function.go | 30 ++++++++++++++++++-
 1 file changed, 29 insertions(+), 1 deletion(-)

diff --git a/internal/providers/pluginfw/resources/catalog/resource_function.go b/internal/providers/pluginfw/resources/catalog/resource_function.go
index b30e868aa..85acffa81 100644
--- a/internal/providers/pluginfw/resources/catalog/resource_function.go
+++ b/internal/providers/pluginfw/resources/catalog/resource_function.go
@@ -5,8 +5,10 @@ import (
 
 	"github.com/databricks/terraform-provider-databricks/common"
 	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
 	"github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf"
+	"github.com/hashicorp/terraform-plugin-framework/diag"
 	"github.com/hashicorp/terraform-plugin-framework/resource"
 	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
 )
@@ -32,7 +34,7 @@ func (r *FunctionResource) Schema(ctx context.Context, req resource.SchemaReques
 	attrs, blocks := tfschema.ResourceStructToSchemaMap(catalog_tf.FunctionInfo{}, nil)
 
 	resp.Schema = schema.Schema{
-		Description: "Terraform schema for Databricks Functions",
+		Description: "Terraform schema for Databricks Function",
 		Attributes:  attrs,
 		Blocks:      blocks,
 	}
@@ -43,3 +45,29 @@ func (r *FunctionResource) Configure(ctx context.Context, req resource.Configure
 		r.Client = pluginfwcommon.ConfigureResource(req, resp)
 	}
 }
+
+/* Is there a way I can make this general, accept any type of Response? There seems to be no base class that relates them... */
+func AppendDiagAndCheckErrors(resp *resource.CreateResponse, diags diag.Diagnostics) bool {
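	// Illustrative aside, not part of this commit: the framework's CreateResponse,
	// ReadResponse, UpdateResponse and DeleteResponse types share no common
	// interface, but each carries a Diagnostics field of type diag.Diagnostics.
	// Accepting *diag.Diagnostics instead of a concrete response type therefore
	// generalizes the helper to every CRUD method, which is the shape the next
	// commit switches to:
	//
	//	func AppendDiagAndCheckErrors(target *diag.Diagnostics, diags diag.Diagnostics) bool {
	//		target.Append(diags...)
	//		return target.HasError()
	//	}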
+ return resp.Diagnostics.HasError() +} + +func (r *FunctionResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + w, diags := r.Client.GetWorkspaceClient() + if AppendDiagAndCheckErrors(resp, diags) { + return + } +} + +func (r *FunctionResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + /* TODO */ +} + +func (r *FunctionResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + /* TODO */ +} + +func (r *FunctionResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + /* TODO */ +} From 41d8d8fb2dfcf5428c674b776c12464159422466 Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Thu, 31 Oct 2024 20:25:21 -0600 Subject: [PATCH 03/10] Added Read and Update methods for function resource --- .../resources/catalog/resource_function.go | 76 ++++++++++++++++--- 1 file changed, 66 insertions(+), 10 deletions(-) diff --git a/internal/providers/pluginfw/resources/catalog/resource_function.go b/internal/providers/pluginfw/resources/catalog/resource_function.go index 85acffa81..d5586bafe 100644 --- a/internal/providers/pluginfw/resources/catalog/resource_function.go +++ b/internal/providers/pluginfw/resources/catalog/resource_function.go @@ -3,9 +3,12 @@ package catalog import ( "context" + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/terraform-provider-databricks/common" pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" "github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf" "github.com/hashicorp/terraform-plugin-framework/diag" @@ -46,28 +49,81 @@ func (r *FunctionResource) Configure(ctx context.Context, req resource.Configure } } -/* Is there a way I can make this general, accept any type of Response? There seems to be no base class that relates them... */ -func AppendDiagAndCheckErrors(resp *resource.CreateResponse, diags diag.Diagnostics) bool { - resp.Diagnostics.Append(diags...) - return resp.Diagnostics.HasError() +func AppendDiagAndCheckErrors(resp *diag.Diagnostics, diags diag.Diagnostics) bool { + resp.Append(diags...) 
+ return resp.HasError() } func (r *FunctionResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + +} + +func (r *FunctionResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) w, diags := r.Client.GetWorkspaceClient() - if AppendDiagAndCheckErrors(resp, diags) { + if AppendDiagAndCheckErrors(&resp.Diagnostics, diags) { return } -} -func (r *FunctionResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - /* TODO */ + var planFunc catalog_tf.FunctionInfo + if AppendDiagAndCheckErrors(&resp.Diagnostics, req.Plan.Get(ctx, &planFunc)) { + return + } + + var updateReq catalog.UpdateFunction + + if AppendDiagAndCheckErrors(&resp.Diagnostics, converters.TfSdkToGoSdkStruct(ctx, planFunc, &updateReq)) { + return + } + + funcInfo, err := w.Functions.Update(ctx, updateReq) + if err != nil { + resp.Diagnostics.AddError("failed to update function", err.Error()) + } + + if AppendDiagAndCheckErrors(&resp.Diagnostics, converters.GoSdkToTfSdkStruct(ctx, funcInfo, &planFunc)) { + return + } + + if AppendDiagAndCheckErrors(&resp.Diagnostics, resp.State.Set(ctx, funcInfo)) { + return + } } func (r *FunctionResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - /* TODO */ + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + w, diags := r.Client.GetWorkspaceClient() + if AppendDiagAndCheckErrors(&resp.Diagnostics, diags) { + return + } + + var stateFunc catalog_tf.FunctionInfo + if AppendDiagAndCheckErrors(&resp.Diagnostics, req.State.Get(ctx, &stateFunc)) { + return + } + + funcName := stateFunc.Name.ValueString() + + funcInfo, err := w.Functions.GetByName(ctx, funcName) + if err != nil { + if apierr.IsMissing(err) { + resp.State.RemoveResource(ctx) + return + } + resp.Diagnostics.AddError("failed to get function", err.Error()) + return + } + + if AppendDiagAndCheckErrors(&resp.Diagnostics, converters.GoSdkToTfSdkStruct(ctx, funcInfo, &stateFunc)) { + return + } + + if AppendDiagAndCheckErrors(&resp.Diagnostics, resp.State.Set(ctx, stateFunc)) { + return + } } func (r *FunctionResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - /* TODO */ + } From 2213438663ebb45f3a27a5e5fb58e25451253059 Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Fri, 1 Nov 2024 20:25:08 -0600 Subject: [PATCH 04/10] added all crud methods for function resource --- .../resources/catalog/resource_function.go | 133 +++++++++++++++--- 1 file changed, 115 insertions(+), 18 deletions(-) diff --git a/internal/providers/pluginfw/resources/catalog/resource_function.go b/internal/providers/pluginfw/resources/catalog/resource_function.go index d5586bafe..de072d4d9 100644 --- a/internal/providers/pluginfw/resources/catalog/resource_function.go +++ b/internal/providers/pluginfw/resources/catalog/resource_function.go @@ -2,8 +2,12 @@ package catalog import ( "context" + "fmt" + "time" + "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/retries" "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/terraform-provider-databricks/common" pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" @@ -12,6 +16,7 @@ import ( 
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" "github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf" "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" ) @@ -24,6 +29,28 @@ func ResourceFunction() resource.Resource { return &FunctionResource{} } +func waitForFunction(ctx context.Context, w *databricks.WorkspaceClient, funcInfo *catalog.FunctionInfo) diag.Diagnostics { + const timeout = 5 * time.Minute + + result, err := retries.Poll[catalog.FunctionInfo](ctx, timeout, func() (*catalog.FunctionInfo, *retries.Err) { + attempt, err := w.Functions.GetByName(ctx, funcInfo.FullName) + if err != nil { + if apierr.IsMissing(err) { + return nil, retries.Continue(fmt.Errorf("function %s is not yet available", funcInfo.FullName)) + } + return nil, retries.Halt(fmt.Errorf("failed to get function: %s", err)) + } + return attempt, nil + }) + + if err != nil { + return diag.Diagnostics{diag.NewErrorDiagnostic("failed to create function", err.Error())} + } + + *funcInfo = *result + return nil +} + type FunctionResource struct { Client *common.DatabricksClient } @@ -32,9 +59,25 @@ func (r *FunctionResource) Metadata(ctx context.Context, req resource.MetadataRe resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName) } -// TODO: Update as needed to fit the requirements of the resource. func (r *FunctionResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - attrs, blocks := tfschema.ResourceStructToSchemaMap(catalog_tf.FunctionInfo{}, nil) + attrs, blocks := tfschema.ResourceStructToSchemaMap(catalog_tf.FunctionInfo{}, func(c tfschema.CustomizableSchema) tfschema.CustomizableSchema { + c.SetRequired("name") + c.SetRequired("catalog_name") + c.SetRequired("schema_name") + c.SetRequired("input_params") + c.SetRequired("data_type") + c.SetRequired("routine_body") + c.SetRequired("routine_defintion") + c.SetRequired("language") + + c.SetReadOnly("full_name") + c.SetReadOnly("created_at") + c.SetReadOnly("created_by") + c.SetReadOnly("updated_at") + c.SetReadOnly("updated_by") + + return c + }) resp.Schema = schema.Schema{ Description: "Terraform schema for Databricks Function", @@ -49,30 +92,67 @@ func (r *FunctionResource) Configure(ctx context.Context, req resource.Configure } } -func AppendDiagAndCheckErrors(resp *diag.Diagnostics, diags diag.Diagnostics) bool { - resp.Append(diags...) - return resp.HasError() +func (r *FunctionResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("full_name"), req, resp) } func (r *FunctionResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + w, diags := r.Client.GetWorkspaceClient() + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var planFunc catalog_tf.FunctionInfo + resp.Diagnostics.Append(req.Plan.Get(ctx, &planFunc)...) + if resp.Diagnostics.HasError() { + return + } + + var createReq catalog.CreateFunctionRequest + + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, planFunc, &createReq)...) 
+ if resp.Diagnostics.HasError() { + return + } + + funcInfo, err := w.Functions.Create(ctx, createReq) + if err != nil { + resp.Diagnostics.AddError("failed to create function", err.Error()) + } + + resp.Diagnostics.Append(waitForFunction(ctx, w, funcInfo)...) + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, funcInfo, &planFunc)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, planFunc)...) } func (r *FunctionResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) w, diags := r.Client.GetWorkspaceClient() - if AppendDiagAndCheckErrors(&resp.Diagnostics, diags) { + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { return } var planFunc catalog_tf.FunctionInfo - if AppendDiagAndCheckErrors(&resp.Diagnostics, req.Plan.Get(ctx, &planFunc)) { + resp.Diagnostics.Append(req.Plan.Get(ctx, &planFunc)...) + if resp.Diagnostics.HasError() { return } var updateReq catalog.UpdateFunction - if AppendDiagAndCheckErrors(&resp.Diagnostics, converters.TfSdkToGoSdkStruct(ctx, planFunc, &updateReq)) { + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, planFunc, &updateReq)...) + if resp.Diagnostics.HasError() { return } @@ -81,25 +161,27 @@ func (r *FunctionResource) Update(ctx context.Context, req resource.UpdateReques resp.Diagnostics.AddError("failed to update function", err.Error()) } - if AppendDiagAndCheckErrors(&resp.Diagnostics, converters.GoSdkToTfSdkStruct(ctx, funcInfo, &planFunc)) { + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, funcInfo, &planFunc)...) + if resp.Diagnostics.HasError() { return } - if AppendDiagAndCheckErrors(&resp.Diagnostics, resp.State.Set(ctx, funcInfo)) { - return - } + resp.Diagnostics.Append(resp.State.Set(ctx, planFunc)...) } func (r *FunctionResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) w, diags := r.Client.GetWorkspaceClient() - if AppendDiagAndCheckErrors(&resp.Diagnostics, diags) { + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { return } var stateFunc catalog_tf.FunctionInfo - if AppendDiagAndCheckErrors(&resp.Diagnostics, req.State.Get(ctx, &stateFunc)) { + + resp.Diagnostics.Append(req.State.Get(ctx, &stateFunc)...) + if resp.Diagnostics.HasError() { return } @@ -115,15 +197,30 @@ func (r *FunctionResource) Read(ctx context.Context, req resource.ReadRequest, r return } - if AppendDiagAndCheckErrors(&resp.Diagnostics, converters.GoSdkToTfSdkStruct(ctx, funcInfo, &stateFunc)) { + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, funcInfo, &stateFunc)...) + if resp.Diagnostics.HasError() { return } - if AppendDiagAndCheckErrors(&resp.Diagnostics, resp.State.Set(ctx, stateFunc)) { - return - } + resp.Diagnostics.Append(resp.State.Set(ctx, stateFunc)...) } func (r *FunctionResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + w, diags := r.Client.GetWorkspaceClient() + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + var deleteReq catalog_tf.DeleteFunctionRequest + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("full_name"), &deleteReq.Name)...) 
+ if resp.Diagnostics.HasError() { + return + } + + err := w.Functions.DeleteByName(ctx, deleteReq.Name.ValueString()) + if err != nil && !apierr.IsMissing(err) { + resp.Diagnostics.AddError("failed to delete function", err.Error()) + } } From feba4e00d943301f1a26ca9c460c31938a60ff71 Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Wed, 6 Nov 2024 18:46:43 -0600 Subject: [PATCH 05/10] added acceptance tests for databricks_function resource --- .../catalog/resource_function_acc_test.go | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 internal/providers/pluginfw/resources/catalog/resource_function_acc_test.go diff --git a/internal/providers/pluginfw/resources/catalog/resource_function_acc_test.go b/internal/providers/pluginfw/resources/catalog/resource_function_acc_test.go new file mode 100644 index 000000000..83053a693 --- /dev/null +++ b/internal/providers/pluginfw/resources/catalog/resource_function_acc_test.go @@ -0,0 +1,56 @@ +package catalog_test + +import ( + "testing" + + "github.com/databricks/terraform-provider-databricks/internal/acceptance" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/stretchr/testify/require" +) + +func CheckFunctionResourcePopulated(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + _, ok := s.Modules[0].Resources["databricks_function.function"] + require.True(t, ok, "databricks_function.function has to be in the Terraform state") + return nil + } +} + +func TestFunctionResource(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: ` + resource "databricks_catalog" "sandbox" { + name = "sandbox-${var.STICKY_RANDOM}" + } + + resource "databricks_schema" "functions" { + catalog_name = databricks_catalog.sandbox.id + name = "functions-${var.STICKY_RANDOM}" + } + + resource "databricks_function" "function" { + name = "function-${var.STICKY_RANDOM}" + catalog_name = databricks_catalog.sandbox.id + schema_name = databricks_schema.functions.name + input_params = [ + { + name = "weight" + type = "DOUBLE" + }, + { + name = "height" + type = "DOUBLE" + } + ] + data_type = "DOUBLE" + routine_body = "SQL" + routine_defintion = "weight / (height * height)" + language = "SQL" + is_deterministic = true + sql_data_access = "CONTAINS_SQL" + security_type = "DEFINER" + } + `, + Check: CheckFunctionResourcePopulated(t), + }) +} From 2a46c0524c5ec32f90959701240f015ab209eb8e Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Wed, 6 Nov 2024 19:09:15 -0600 Subject: [PATCH 06/10] added documentation for the databricks_function resource --- docs/resources/functions.md | 112 ++++++++++++++++++++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 docs/resources/functions.md diff --git a/docs/resources/functions.md b/docs/resources/functions.md new file mode 100644 index 000000000..41a346a2d --- /dev/null +++ b/docs/resources/functions.md @@ -0,0 +1,112 @@ +--- +subcategory: "Unity Catalog" +--- +# databricks_functionss Resource + +-> This resource source can only be used with a workspace-level provider. + +Creates a [User-Defined Function (UDF)](https://docs.databricks.com/en/udf/unity-catalog.html) in Unity Catalog. UDFs can be defined using SQL, or external languages (e.g., Python) and are stored within [Unity Catalog schemas](../resources/schema.md). 
+ +## Example Usage + +### SQL-based function: + +```hcl +resource "databricks_catalog" "sandbox" { + name = "sandbox_example" + comment = "Catalog managed by Terraform" +} + +resource "databricks_schema" "functions" { + catalog_name = databricks_catalog.sandbox.name + name = "functions_example" + comment = "Schema managed by Terraform" +} + +resource "databricks_function" "calculate_bmi" { + name = "calculate_bmi" + catalog_name = databricks_catalog.sandbox.name + schema_name = databricks_schema.functions.name + input_params = [ + { + name = "weight" + type = "DOUBLE" + }, + { + name = "height" + type = "DOUBLE" + } + ] + data_type = "DOUBLE" + routine_body = "SQL" + routine_definition = "weight / (height * height)" + language = "SQL" + is_deterministic = true + sql_data_access = "CONTAINS_SQL" + security_type = "DEFINER" +} +``` + +### Python-based function: + +```hcl +resource "databricks_function" "calculate_bmi_py" { + name = "calculate_bmi_py" + catalog_name = databricks_catalog.sandbox.name + schema_name = databricks_schema.functions.name + input_params = [ + { + name = "weight_kg" + type = "DOUBLE" + }, + { + name = "height_m" + type = "DOUBLE" + } + ] + data_type = "DOUBLE" + routine_body = "EXTERNAL" + routine_definition = "return weight_kg / (height_m ** 2)" + language = "Python" + is_deterministic = false + sql_data_access = "NO_SQL" + security_type = "DEFINER" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) The name of the function. +* `catalog_name` - (Required) The name of the parent [databricks_catalog](../resources/catalog.md). +* `schema_name` - (Required) The name of [databricks_schema](../resources/schema.md) where the function will reside. +* `input_params` - (Required) A list of objects specifying the input parameters for the function. + * `name` - (Required) The name of the parameter. + * `type` - (Required) The data type of the parameter (e.g., `DOUBLE`, `INT`, etc.). +* `data_type` - (Required) The return data type of the function (e.g., `DOUBLE`). +* `routine_body` - (Required) Specifies the body type of the function, either `SQL` for SQL-based functions or `EXTERNAL` for functions in external languages. +* `routine_definition` - (Required) The actual definition of the function, expressed in SQL or the specified external language. +* `language` - (Required) The language of the function, e.g., `SQL` or `Python`. +* `is_deterministic`- (Optional, `bool`) Whether the function is deterministic. Default is `true`. +* `sql_data_Access`- (Optional) The SQL data access level for the function. Possible values are: + * `CONTAINS_SQL` - The function contains SQL statements. + * `READS_SQL_DATA` - The function reads SQL data but does not modify it. + * `NO_SQL` - The function does not contain SQL. +* `security_type` - (Optional) The security type of the function, generally `DEFINER`. + +## Attribute Reference + +In addition to all arguments above, the following attributes are exported: +* `full_name` - Full name of the function in the form of `catalog_name.schema_name.function_name`. +* `created_at` - The time when this function was created, in epoch milliseconds. +* `created_by` - The username of the function's creator. +* `updated_at` - The time when this function was last updated, in epoch milliseconds. +* `updated_by` - The username of the last user to modify the function. 
+ +## Related Resources + +The following resources are used in the same context: + +* [databricks_schema](./schema.md) to get information about a single schema +* Data source [databricks_functions](../data-sources/functions.md) to get a list of functions under a specified location. From f0b26224022f09addbe0dbc902911ad0e2501015 Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Wed, 6 Nov 2024 19:12:07 -0600 Subject: [PATCH 07/10] fixed typos in docs and added resource to the rollout utils --- docs/resources/{functions.md => function.md} | 2 +- internal/providers/pluginfw/pluginfw_rollout_utils.go | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) rename docs/resources/{functions.md => function.md} (99%) diff --git a/docs/resources/functions.md b/docs/resources/function.md similarity index 99% rename from docs/resources/functions.md rename to docs/resources/function.md index 41a346a2d..803557ccb 100644 --- a/docs/resources/functions.md +++ b/docs/resources/function.md @@ -1,7 +1,7 @@ --- subcategory: "Unity Catalog" --- -# databricks_functionss Resource +# databricks_function Resource -> This resource source can only be used with a workspace-level provider. diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go index 90b782a51..cdeec8393 100644 --- a/internal/providers/pluginfw/pluginfw_rollout_utils.go +++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go @@ -39,6 +39,7 @@ var migratedDataSources = []func() datasource.DataSource{ var pluginFwOnlyResources = []func() resource.Resource{ // TODO Add resources here sharing.ResourceShare, // Using the staging name (with pluginframework suffix) + catalog.ResourceFunction, } // List of data sources that have been onboarded to the plugin framework - not migrated from sdkv2. From 64b136cc4b871ed9d93430fc83e1f2fa8abd0f70 Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Mon, 2 Dec 2024 12:03:48 -0600 Subject: [PATCH 08/10] added requested changes and included an update pathway in the tests --- .../products/catalog/resource_function.go | 2 + .../catalog/resource_function_acc_test.go | 53 ++++++++++++++++++- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/internal/providers/pluginfw/products/catalog/resource_function.go b/internal/providers/pluginfw/products/catalog/resource_function.go index de072d4d9..d303d11a1 100644 --- a/internal/providers/pluginfw/products/catalog/resource_function.go +++ b/internal/providers/pluginfw/products/catalog/resource_function.go @@ -120,6 +120,7 @@ func (r *FunctionResource) Create(ctx context.Context, req resource.CreateReques funcInfo, err := w.Functions.Create(ctx, createReq) if err != nil { resp.Diagnostics.AddError("failed to create function", err.Error()) + return } resp.Diagnostics.Append(waitForFunction(ctx, w, funcInfo)...) @@ -159,6 +160,7 @@ func (r *FunctionResource) Update(ctx context.Context, req resource.UpdateReques funcInfo, err := w.Functions.Update(ctx, updateReq) if err != nil { resp.Diagnostics.AddError("failed to update function", err.Error()) + return } resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, funcInfo, &planFunc)...) 
diff --git a/internal/providers/pluginfw/products/catalog/resource_function_acc_test.go b/internal/providers/pluginfw/products/catalog/resource_function_acc_test.go index 83053a693..64bc50225 100644 --- a/internal/providers/pluginfw/products/catalog/resource_function_acc_test.go +++ b/internal/providers/pluginfw/products/catalog/resource_function_acc_test.go @@ -16,6 +16,16 @@ func CheckFunctionResourcePopulated(t *testing.T) func(s *terraform.State) error } } +func CheckFunctionResourceUpdated(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + resource, ok := s.Modules[0].Resources["databricks_function.function"] + require.True(t, ok, "databricks_function.function must exist after update") + require.Equal(t, "DOUBLE", resource.Primary.Attributes["data_type"]) + require.Contains(t, resource.Primary.Attributes["routine_defintion"], "age / 100") + return nil + } +} + func TestFunctionResource(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: ` @@ -52,5 +62,46 @@ func TestFunctionResource(t *testing.T) { } `, Check: CheckFunctionResourcePopulated(t), - }) + }, + acceptance.Step{ + Template: ` + resource "databricks_catalog" "sandbox" { + name = "sandbox-${var.STICKY_RANDOM}" + } + + resource "databricks_schema" "functions" { + catalog_name = databricks_catalog.sandbox.id + name = "functions-${var.STICKY_RANDOM}" + } + + resource "databricks_function" "function" { + name = "function-${var.STICKY_RANDOM}" + catalog_name = databricks_catalog.sandbox.id + schema_name = databricks_schema.functions.name + input_params = [ + { + name = "weight" + type = "DOUBLE" + }, + { + name = "height" + type = "DOUBLE" + }, + { + name = "age" + type = "INT" + } + ] + data_type = "DOUBLE" + routine_body = "SQL" + routine_defintion = "weight / (height * height) + age / 100" + language = "SQL" + is_deterministic = true + sql_data_access = "CONTAINS_SQL" + security_type = "DEFINER" + } + `, + Check: CheckFunctionResourceUpdated(t), + }, + ) } From d23cdc4aff6e1db168508f955c7d1a316e56e69d Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Mon, 2 Dec 2024 12:32:43 -0600 Subject: [PATCH 09/10] added missing required attributes and finished arugment reference on docs --- docs/resources/function.md | 37 ++++++++++++++++--- .../products/catalog/resource_function.go | 10 ++++- 2 files changed, 40 insertions(+), 7 deletions(-) diff --git a/docs/resources/function.md b/docs/resources/function.md index 803557ccb..a99e0ee70 100644 --- a/docs/resources/function.md +++ b/docs/resources/function.md @@ -85,15 +85,42 @@ The following arguments are supported: * `name` - (Required) The name of the parameter. * `type` - (Required) The data type of the parameter (e.g., `DOUBLE`, `INT`, etc.). * `data_type` - (Required) The return data type of the function (e.g., `DOUBLE`). -* `routine_body` - (Required) Specifies the body type of the function, either `SQL` for SQL-based functions or `EXTERNAL` for functions in external languages. +* `full_data_type` - (Required) Pretty printed function data type (e.g. `string`). +* `return_params` - (Optional) A list of objects specifying the function's return parameters. + * `parameters` - (Required) An array of objects describing the function's return parameters. Each object includes: + * `name` - (Required) The name of the return parameter. + * `type_text` - (Required) The full data type specification as SQL/catalog string text. + * `type_json` - The full data type specification as JSON-serialized text. 
+ * `type_name` - (Required) The name of the data type (e.g., `BOOLEAN`, `INT`, `STRING`, etc.). + * `type_precision` - (Required for `DecimalTypes`) Digits of precision for the type. + * `type_scale` - (Required for `DecimalTypes`) Digits to the right of the decimal for the type. + * `type_interval_type` - The format of `IntervalType`. + * `position` - (Required) The ordinal position of the parameter (starting at 0). + * `parameter_mode` - The mode of the parameter. Possible value: `IN`. + * `parameter_type` - The type of the parameter. Possible values: + * `PARAM` - Represents a generic parameter. + * `COLUMN` - Represents a column parameter. + * `parameter_default` - The default value for the parameter, if any. + * `comment` - User-provided free-form text description of the parameter. * `routine_definition` - (Required) The actual definition of the function, expressed in SQL or the specified external language. -* `language` - (Required) The language of the function, e.g., `SQL` or `Python`. -* `is_deterministic`- (Optional, `bool`) Whether the function is deterministic. Default is `true`. -* `sql_data_Access`- (Optional) The SQL data access level for the function. Possible values are: +* `routine_dependencies` - (Optional) A list of objects specifying the function's dependencies. Each object includes: + * `dependencies` - (Optional) An array of objects describing the dependencies. Each object includes: + * `table` - (Optional) An object representing a table that is dependent on the SQL object. + * `function` - (Optional) An object representing a function that is dependent on the SQL object. +* `is_deterministic`- (Required, `bool`) Whether the function is deterministic. Default is `true`. +* `is_null_call` - (Required, `bool`) Indicates whether the function should handle `NULL` input arguments explicitly. +* `specific_name` - (Required) Specific name of the function. Reserverd for future use. +* `external_name` - (Optional) External function name. +* `sql_path` - (Optional) The fully qualified SQL path where the function resides, including catalog and schema information. +* `comment` - (Optional) User-provided free-form text description. +* `properties` - (Optional) A key-value pair object representing additional metadata or attributes associated with the function. +* `routine_body` - (Required) Specifies the body type of the function, either `SQL` for SQL-based functions or `EXTERNAL` for functions in external languages. +* `security_type` - (Required) The security type of the function, generally `DEFINER`. +* `sql_data_access`- (Required) The SQL data access level for the function. Possible values are: * `CONTAINS_SQL` - The function contains SQL statements. * `READS_SQL_DATA` - The function reads SQL data but does not modify it. * `NO_SQL` - The function does not contain SQL. -* `security_type` - (Optional) The security type of the function, generally `DEFINER`. +* `parameter_style` - (Required) Function parameter style (e.g, `S` for SQL). 
## Attribute Reference diff --git a/internal/providers/pluginfw/products/catalog/resource_function.go b/internal/providers/pluginfw/products/catalog/resource_function.go index d303d11a1..8fab89439 100644 --- a/internal/providers/pluginfw/products/catalog/resource_function.go +++ b/internal/providers/pluginfw/products/catalog/resource_function.go @@ -66,9 +66,15 @@ func (r *FunctionResource) Schema(ctx context.Context, req resource.SchemaReques c.SetRequired("schema_name") c.SetRequired("input_params") c.SetRequired("data_type") - c.SetRequired("routine_body") + c.setRequired("full_data_type") c.SetRequired("routine_defintion") - c.SetRequired("language") + c.SetRequired("is_deterministic") + c.SetRequired("is_null_call") + c.SetRequired("specific_name") + c.SetRequired("routine_body") + c.SetRequired("security_type") + c.SetRequired("sql_data_access") + c.SetRequired("parameter_style") c.SetReadOnly("full_name") c.SetReadOnly("created_at") From fcd33dd81433969596a1a9652143fabecc94da60 Mon Sep 17 00:00:00 2001 From: dgomez04 Date: Tue, 3 Dec 2024 15:32:20 -0600 Subject: [PATCH 10/10] removed waiting time for function resource --- .../products/catalog/resource_function.go | 32 ------------------- 1 file changed, 32 deletions(-) diff --git a/internal/providers/pluginfw/products/catalog/resource_function.go b/internal/providers/pluginfw/products/catalog/resource_function.go index 8fab89439..ba0e40ed3 100644 --- a/internal/providers/pluginfw/products/catalog/resource_function.go +++ b/internal/providers/pluginfw/products/catalog/resource_function.go @@ -2,12 +2,8 @@ package catalog import ( "context" - "fmt" - "time" - "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" - "github.com/databricks/databricks-sdk-go/retries" "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/terraform-provider-databricks/common" pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" @@ -15,7 +11,6 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" "github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf" - "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" @@ -29,28 +24,6 @@ func ResourceFunction() resource.Resource { return &FunctionResource{} } -func waitForFunction(ctx context.Context, w *databricks.WorkspaceClient, funcInfo *catalog.FunctionInfo) diag.Diagnostics { - const timeout = 5 * time.Minute - - result, err := retries.Poll[catalog.FunctionInfo](ctx, timeout, func() (*catalog.FunctionInfo, *retries.Err) { - attempt, err := w.Functions.GetByName(ctx, funcInfo.FullName) - if err != nil { - if apierr.IsMissing(err) { - return nil, retries.Continue(fmt.Errorf("function %s is not yet available", funcInfo.FullName)) - } - return nil, retries.Halt(fmt.Errorf("failed to get function: %s", err)) - } - return attempt, nil - }) - - if err != nil { - return diag.Diagnostics{diag.NewErrorDiagnostic("failed to create function", err.Error())} - } - - *funcInfo = *result - return nil -} - type FunctionResource struct { Client *common.DatabricksClient } @@ -129,11 +102,6 @@ func (r *FunctionResource) Create(ctx context.Context, req resource.CreateReques return 
} - resp.Diagnostics.Append(waitForFunction(ctx, w, funcInfo)...) - if resp.Diagnostics.HasError() { - return - } - resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, funcInfo, &planFunc)...) if resp.Diagnostics.HasError() { return