-
Notifications
You must be signed in to change notification settings - Fork 398
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[Feature] add
databricks_serving_endpoints
data source (#4226)
## Changes Adding `databricks_serving_endpoints` data source to allow retrieving information about all endpoints deployed on the workspace. This will allow us to add support for rate-limiting of FMAPI endpoints, which are enabled by default ## Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> - [x] `make test` run locally - [x] relevant change in `docs/` folder - [x] covered with integration tests in `internal/acceptance` - [x] relevant acceptance tests are passing - [x] using Go SDK --------- Co-authored-by: Alex Ott <[email protected]> Co-authored-by: Alex Ott <[email protected]>
- Loading branch information
1 parent
53b9bb6
commit 2fccd24
Showing
4 changed files
with
138 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
--- | ||
subcategory: "Serving" | ||
--- | ||
# databricks_serving_endpoints Data Source | ||
|
||
-> This data source can only be used with a workspace-level provider! | ||
|
||
This data source allows you to get information about [Model Serving](https://docs.databricks.com/machine-learning/model-serving/index.html) endpoints in Databricks. | ||
|
||
## Example Usage | ||
|
||
```hcl | ||
data "databricks_serving_endpoints" "all" { | ||
} | ||
resource "databricks_permissions" "ml_serving_usage" { | ||
for_each = { for ep in data.databricks_serving_endpoints.all.endpoints : ep.name => ep } | ||
serving_endpoint_id = each.value.id | ||
access_control { | ||
group_name = "users" | ||
permission_level = "CAN_VIEW" | ||
} | ||
access_control { | ||
group_name = databricks_group.auto.display_name | ||
permission_level = "CAN_MANAGE" | ||
} | ||
access_control { | ||
group_name = databricks_group.eng.display_name | ||
permission_level = "CAN_QUERY" | ||
} | ||
} | ||
``` | ||
|
||
## Attribute Reference | ||
|
||
The following attributes are exported: | ||
|
||
* `endpoints` - List of objects describing the serving endpoints. Each object consists of following attributes: | ||
* `name` - The name of the model serving endpoint. | ||
* `config` - The model serving endpoint configuration. | ||
* `tags` - Tags to be attached to the serving endpoint and automatically propagated to billing logs. | ||
* `rate_limits` - A list of rate limit blocks to be applied to the serving endpoint. | ||
* `ai_gateway` - A block with AI Gateway configuration for the serving endpoint. | ||
* `route_optimized` - A boolean enabling route optimization for the endpoint. | ||
|
||
See [`databricks_model_serving` resource](../resources/model_serving.md) for the full list of attributes for each block. | ||
|
||
## Related Resources | ||
|
||
The following resources are often used in the same context: | ||
|
||
* [databricks_permissions](../resources/permissions.md#model-serving-usage) can control which groups or individual users can *Manage*, *Query* or *View* individual serving endpoints. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
78 changes: 78 additions & 0 deletions
78
internal/providers/pluginfw/products/serving/data_serving_endpoints.go
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
package serving | ||
|
||
import ( | ||
"context" | ||
|
||
"github.com/databricks/databricks-sdk-go/apierr" | ||
"github.com/databricks/terraform-provider-databricks/common" | ||
pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" | ||
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" | ||
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" | ||
"github.com/databricks/terraform-provider-databricks/internal/service/serving_tf" | ||
"github.com/hashicorp/terraform-plugin-framework/datasource" | ||
"github.com/hashicorp/terraform-plugin-framework/datasource/schema" | ||
) | ||
|
||
func DataSourceServingEndpoints() datasource.DataSource { | ||
return &ServingEndpointsDataSource{} | ||
} | ||
|
||
var _ datasource.DataSourceWithConfigure = &ServingEndpointsDataSource{} | ||
|
||
// ServingEndpointsDataSource implements the databricks_serving_endpoints
// data source on top of the Terraform plugin framework.
type ServingEndpointsDataSource struct {
	// Client is the provider-level Databricks client; populated in Configure.
	Client *common.DatabricksClient
}
|
||
// ServingEndpointsData is the Terraform model for the data source: a single
// computed list holding every serving endpoint found in the workspace.
type ServingEndpointsData struct {
	Endpoints []serving_tf.ServingEndpoint `tfsdk:"endpoints" tf:"optional,computed"`
}
|
||
// Metadata sets the Terraform type name under which this data source is
// exposed to practitioners.
func (d *ServingEndpointsDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = "databricks_serving_endpoints"
}
|
||
func (d *ServingEndpointsDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { | ||
attrs, blocks := tfschema.DataSourceStructToSchemaMap(ServingEndpointsData{}, nil) | ||
resp.Schema = schema.Schema{ | ||
Attributes: attrs, | ||
Blocks: blocks, | ||
} | ||
} | ||
|
||
func (d *ServingEndpointsDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { | ||
if d.Client == nil { | ||
d.Client = pluginfwcommon.ConfigureDataSource(req, resp) | ||
} | ||
} | ||
|
||
func (d *ServingEndpointsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { | ||
w, diags := d.Client.GetWorkspaceClient() | ||
resp.Diagnostics.Append(diags...) | ||
if resp.Diagnostics.HasError() { | ||
return | ||
} | ||
|
||
var endpoints ServingEndpointsData | ||
diags = req.Config.Get(ctx, &endpoints) | ||
resp.Diagnostics.Append(diags...) | ||
if resp.Diagnostics.HasError() { | ||
return | ||
} | ||
endpointsInfoSdk, err := w.ServingEndpoints.ListAll(ctx) | ||
if err != nil { | ||
if apierr.IsMissing(err) { | ||
resp.State.RemoveResource(ctx) | ||
} | ||
resp.Diagnostics.AddError("failed to list endpoints", err.Error()) | ||
return | ||
} | ||
for _, endpoint := range endpointsInfoSdk { | ||
var endpointsInfo serving_tf.ServingEndpoint | ||
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, endpoint, &endpointsInfo)...) | ||
if resp.Diagnostics.HasError() { | ||
return | ||
} | ||
endpoints.Endpoints = append(endpoints.Endpoints, endpointsInfo) | ||
} | ||
resp.Diagnostics.Append(resp.State.Set(ctx, endpoints)...) | ||
} |