From c065e80c367f1903667329576dcd7e4e8c67481c Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Fri, 25 Oct 2024 09:49:02 +0200 Subject: [PATCH 1/2] [Feature] Added `databricks_functions` data source It's now possible to fetch information about functions defined in a specific UC schema. Resolves #4111 --- docs/data-sources/functions.md | 86 ++++++++++++++++++ internal/providers/pluginfw/pluginfw.go | 2 + .../resources/catalog/data_functions.go | 90 +++++++++++++++++++ 3 files changed, 178 insertions(+) create mode 100644 docs/data-sources/functions.md create mode 100644 internal/providers/pluginfw/resources/catalog/data_functions.go diff --git a/docs/data-sources/functions.md b/docs/data-sources/functions.md new file mode 100644 index 000000000..fc7d85763 --- /dev/null +++ b/docs/data-sources/functions.md @@ -0,0 +1,86 @@ +--- +subcategory: "Unity Catalog" +--- +# databricks_functionss Data Source + +-> **Note** This data source can only be used with a workspace-level provider! + +Retrieves a list of [User-Defined Functions (UDFs) registered in the Unity Catalog](https://docs.databricks.com/en/udf/unity-catalog.html). + +## Example Usage + +List all functions defined in a specific schema (`main.default` in this example): + +```hcl +data "databricks_functions" "all" { + catalog_name = "main" + schema_name = "default" +} + +output "all_functions" { + value = data.databricks_functions.all.functions +} +``` + +## Argument Reference + +The following arguments are supported: + +* `catalog_name` - (Required) Name of [databricks_catalog](../resources/catalog.md). +* `schema_name` - (Required) Name of [databricks_schema](../resources/schema.md). +* `include_browse` - (Optional, Boolean) flag to specify if include UDFs in the response for which the principal can only access selective metadata for. + +## Attribute Reference + +This data source exports the following attributes: + +* `functions` - list of objects describing individual UDF. 
Each object consists of the following attributes (refer to [REST API documentation](https://docs.databricks.com/api/workspace/functions/list#functions) for up-to-date list of attributes. Default type is String): + * `name` - Name of function, relative to parent schema. + * `catalog_name` - Name of parent catalog. + * `schema_name` - Name of parent schema relative to its parent catalog. + * `input_params` - object describing input parameters. Consists of the single attribute: + * `parameters` - The array of definitions of the function's parameters: + * `name` - Name of parameter. + * `type_text` - Full data type spec, SQL/catalogString text. + * `type_json` - Full data type spec, JSON-serialized. + * `type_name` - Name of type (INT, STRUCT, MAP, etc.). + * `type_precision` - Digits of precision; required on Create for DecimalTypes. + * `type_scale` - Digits to right of decimal; Required on Create for DecimalTypes. + * `type_interval_type` - Format of IntervalType. + * `position` - Ordinal position of column (starting at position 0). + * `parameter_mode` - The mode of the function parameter. + * `parameter_type` - The type of function parameter (`PARAM` or `COLUMN`). + * `parameter_default` - Default value of the parameter. + * `comment` - User-provided free-form text description. + * `return_params` - Table function return parameters. See `input_params` for description. + * `data_type` - Scalar function return data type. + * `full_data_type` - Pretty printed function data type. + * `routine_body` - Function language (`SQL` or `EXTERNAL`). When `EXTERNAL` is used, the language of the routine function should be specified in the `external_language` field, and the `return_params` of the function cannot be used (as `TABLE` return type is not supported), and the `sql_data_access` field must be `NO_SQL`. + * `routine_definition` - Function body. + * `routine_dependencies` - Function dependencies. + * `parameter_style` - Function parameter style. `S` is the value for SQL. 
+ * `is_deterministic` - Boolean flag specifying whether the function is deterministic. + * `sql_data_access` - Function SQL data access (`CONTAINS_SQL`, `READS_SQL_DATA`, `NO_SQL`). + * `is_null_call` - Boolean flag whether function null call. + * `security_type` - Function security type. (Enum: `DEFINER`). + * `specific_name` - Specific name of the function; Reserved for future use. + * `external_name` - External function name. + * `external_language` - External function language. + * `sql_path` - List of schemes whose objects can be referenced without qualification. + * `owner` - Username of current owner of function. + * `comment` - User-provided free-form text description. + * `properties` - JSON-serialized key-value pair map, encoded (escaped) as a string. + * `metastore_id` - Unique identifier of parent metastore. + * `full_name` - Full name of function, in form of catalog_name.schema_name.function_name + * `created_at` - Time at which this function was created, in epoch milliseconds. + * `created_by` - Username of function creator. + * `updated_at` - Time at which this function was last updated, in epoch milliseconds. + * `updated_by` - Username of user who last modified function. + * `function_id` - Id of Function, relative to parent schema. + * `browse_only` - Indicates whether the principal is limited to retrieving metadata for the associated object through the `BROWSE` privilege when `include_browse` is enabled in the request. 
+ +## Related Resources + +The following resources are used in the same context: + +* [databricks_schema](./schema.md) to get information about a single schema diff --git a/internal/providers/pluginfw/pluginfw.go b/internal/providers/pluginfw/pluginfw.go index e813c94aa..5592e3e29 100644 --- a/internal/providers/pluginfw/pluginfw.go +++ b/internal/providers/pluginfw/pluginfw.go @@ -16,6 +16,7 @@ import ( "github.com/databricks/terraform-provider-databricks/commands" "github.com/databricks/terraform-provider-databricks/common" providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/catalog" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations" @@ -60,6 +61,7 @@ func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []f notificationdestinations.DataSourceNotificationDestinations, sharing.DataSourceShare, sharing.DataSourceShares, + catalog.DataSourceFunctions, } } diff --git a/internal/providers/pluginfw/resources/catalog/data_functions.go b/internal/providers/pluginfw/resources/catalog/data_functions.go new file mode 100644 index 000000000..6837800b5 --- /dev/null +++ b/internal/providers/pluginfw/resources/catalog/data_functions.go @@ -0,0 +1,90 @@ +package catalog + +import ( + "context" + "fmt" + + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/terraform-provider-databricks/common" + pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" + 
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" + "github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func DataSourceFunctions() datasource.DataSource { + return &FunctionsDataSource{} +} + +var _ datasource.DataSourceWithConfigure = &FunctionsDataSource{} + +type FunctionsDataSource struct { + Client *common.DatabricksClient +} + +type FunctionsData struct { + CatalogName types.String `tfsdk:"catalog_name"` + SchemaName types.String `tfsdk:"schema_name"` + IncludeBrowse types.Bool `tfsdk:"include_browse" tf:"optional"` + Functions []catalog_tf.FunctionInfo `tfsdk:"functions" tf:"optional,computed"` +} + +func (d *FunctionsDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "databricks_functions" +} + +func (d *FunctionsDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + attrs, blocks := tfschema.DataSourceStructToSchemaMap(FunctionsData{}, nil) + resp.Schema = schema.Schema{ + Attributes: attrs, + Blocks: blocks, + } +} + +func (d *FunctionsDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + if d.Client == nil { + d.Client = pluginfwcommon.ConfigureDataSource(req, resp) + } +} + +func (d *FunctionsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + w, diags := d.Client.GetWorkspaceClient() + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + var functions FunctionsData + diags = req.Config.Get(ctx, &functions) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + catalogName := functions.CatalogName.ValueString() + schemaName := functions.SchemaName.ValueString() + functionsInfosSdk, err := w.Functions.ListAll(ctx, catalog.ListFunctionsRequest{ + CatalogName: catalogName, + SchemaName: schemaName, + IncludeBrowse: functions.IncludeBrowse.ValueBool(), + }) + if err != nil { + if apierr.IsMissing(err) { + resp.State.RemoveResource(ctx) + } + resp.Diagnostics.AddError(fmt.Sprintf("failed to get functions for %s.%s schema", catalogName, schemaName), err.Error()) + return + } + for _, functionSdk := range functionsInfosSdk { + var function catalog_tf.FunctionInfo + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, functionSdk, &function)...) + if resp.Diagnostics.HasError() { + return + } + functions.Functions = append(functions.Functions, function) + } + resp.Diagnostics.Append(resp.State.Set(ctx, functions)...) +} From a334d0e6b3e53293671ed06aed3fa8d4798ebb23 Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Wed, 30 Oct 2024 09:15:26 -0400 Subject: [PATCH 2/2] Update docs/data-sources/functions.md Co-authored-by: Miles Yucht --- docs/data-sources/functions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/data-sources/functions.md b/docs/data-sources/functions.md index fc7d85763..9a02db5b3 100644 --- a/docs/data-sources/functions.md +++ b/docs/data-sources/functions.md @@ -3,7 +3,7 @@ subcategory: "Unity Catalog" --- # databricks_functionss Data Source --> **Note** This data source can only be used with a workspace-level provider! +-> This data source can only be used with a workspace-level provider! Retrieves a list of [User-Defined Functions (UDFs) registered in the Unity Catalog](https://docs.databricks.com/en/udf/unity-catalog.html).