Skip to content

Commit

Permalink
it passes a test
Browse files Browse the repository at this point in the history
  • Loading branch information
mgyucht committed Dec 5, 2024
1 parent e4aff40 commit 30398da
Show file tree
Hide file tree
Showing 31 changed files with 4,817 additions and 4,637 deletions.
14 changes: 7 additions & 7 deletions .codegen/model.go.tmpl
Original file line number Diff line number Diff line change
Expand Up @@ -78,9 +78,9 @@ func (a {{.PascalName}}) GetComplexFieldTypes(ctx context.Context) map[string]re
}
}

// ToAttrType returns the representation of {{.PascalName}} in the Terraform plugin framework type
// ToObjectType returns the representation of {{.PascalName}} in the Terraform plugin framework type
// system.
func (a {{.PascalName}}) ToAttrType(ctx context.Context) types.ObjectType {
func (a {{.PascalName}}) ToObjectType(ctx context.Context) types.ObjectType {
return types.ObjectType{
AttrTypes: map[string]attr.Type{
{{ range .Fields -}}
Expand All @@ -96,7 +96,7 @@ func (a {{.PascalName}}) ToAttrType(ctx context.Context) types.ObjectType {

{{/*
Jobs has a recursive structure: Tasks contain ForEachTasks, which contain Tasks.
Because of this, GetComplexFieldTypes and ToAttrType will never terminate.
Because of this, GetComplexFieldTypes and ToObjectType will never terminate.
TODO: capture visited types in the context to ensure these methods terminate,
even when they are called recursively.
*/}}
Expand All @@ -120,8 +120,8 @@ even when they are called recursively.
{{- else if .IsInt -}}types.Int64Type
{{- else if .IsAny -}}types.ObjectType{}
{{- else if .IsByteStream}}types.ObjectType{}
{{- else if .IsObject -}}{{/* Objects are treated as lists from a TFSDK type perspective. */}}basetypes.ListType{
ElemType: {{- if .IsExternal -}}{{.Package.Name}}_tf.{{- end -}}{{.PascalName}}{}.ToAttrType(ctx),
{{- else if or .IsEmpty .IsObject -}}{{/* Objects are treated as lists from a TFSDK type perspective. */}}basetypes.ListType{
ElemType: {{- if .IsExternal -}}{{.Package.Name}}_tf.{{- end -}}{{.PascalName}}{}.ToObjectType(ctx),
}
{{- end -}}
{{- end -}}
Expand All @@ -138,7 +138,7 @@ even when they are called recursively.
{{- else if .IsFloat64 -}}types.Float64{}
{{- else if .IsInt -}}types.Int64{}
{{- else if .IsAny -}}struct{}{}
{{- else if .IsObject -}}{{.PascalName}}{}
{{- else if or .IsEmpty .IsObject -}}{{.PascalName}}{}
{{- end -}}
{{- end -}}
{{- end -}}
Expand Down Expand Up @@ -177,7 +177,7 @@ even when they are called recursively.
{{- if not . }}any /* ERROR */
{{- else if .IsExternal }}{{.Package.Name}}.{{.PascalName}}
{{- else if .IsAny}}any
{{- else if .IsEmpty}}[]{{.PascalName}}
{{- else if .IsEmpty}}types.List
{{- else if .IsString}}types.String
{{- else if .IsBool}}types.Bool
{{- else if .IsInt64}}types.Int64
Expand Down
37 changes: 32 additions & 5 deletions internal/providers/pluginfw/products/catalog/data_functions.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package catalog
import (
"context"
"fmt"
"reflect"

"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/catalog"
Expand All @@ -12,8 +13,10 @@ import (
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
"github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
)

Expand All @@ -30,10 +33,27 @@ type FunctionsDataSource struct {
}

type FunctionsData struct {
CatalogName types.String `tfsdk:"catalog_name"`
SchemaName types.String `tfsdk:"schema_name"`
IncludeBrowse types.Bool `tfsdk:"include_browse" tf:"optional"`
Functions []catalog_tf.FunctionInfo `tfsdk:"functions" tf:"optional,computed"`
CatalogName types.String `tfsdk:"catalog_name"`
SchemaName types.String `tfsdk:"schema_name"`
IncludeBrowse types.Bool `tfsdk:"include_browse" tf:"optional"`
Functions types.List `tfsdk:"functions" tf:"optional,computed"`
}

// GetComplexFieldTypes returns a map from the name of each complex
// (non-primitive) field of FunctionsData to the reflect.Type of that
// field's element.
func (FunctionsData) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
	complexFields := map[string]reflect.Type{
		"functions": reflect.TypeOf(catalog_tf.FunctionInfo{}),
	}
	return complexFields
}

// ToObjectType returns the representation of FunctionsData in the Terraform
// plugin framework type system.
func (FunctionsData) ToObjectType(ctx context.Context) types.ObjectType {
	attrTypes := map[string]attr.Type{
		"catalog_name":   types.StringType,
		"schema_name":    types.StringType,
		"include_browse": types.BoolType,
		"functions":      types.ListType{ElemType: catalog_tf.FunctionInfo{}.ToObjectType(ctx)},
	}
	return types.ObjectType{AttrTypes: attrTypes}
}

func (d *FunctionsDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
Expand Down Expand Up @@ -82,13 +102,20 @@ func (d *FunctionsDataSource) Read(ctx context.Context, req datasource.ReadReque
resp.Diagnostics.AddError(fmt.Sprintf("failed to get functions for %s.%s schema", catalogName, schemaName), err.Error())
return
}
tfFunctions := []catalog_tf.FunctionInfo{}
for _, functionSdk := range functionsInfosSdk {
var function catalog_tf.FunctionInfo
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, functionSdk, &function)...)
if resp.Diagnostics.HasError() {
return
}
functions.Functions = append(functions.Functions, function)
tfFunctions = append(tfFunctions, function)
}
var dd diag.Diagnostics
functions.Functions, dd = types.ListValueFrom(ctx, catalog_tf.FunctionInfo{}.ToObjectType(ctx), tfFunctions)
resp.Diagnostics.Append(dd...)
if resp.Diagnostics.HasError() {
return
}
resp.Diagnostics.Append(resp.State.Set(ctx, functions)...)
}
93 changes: 51 additions & 42 deletions internal/providers/pluginfw/products/cluster/data_cluster.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,18 @@ package cluster
import (
"context"
"fmt"
"reflect"
"strings"

"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/terraform-provider-databricks/common"
pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
"github.com/databricks/terraform-provider-databricks/internal/service/compute_tf"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
Expand All @@ -32,9 +34,25 @@ type ClusterDataSource struct {
}

type ClusterInfo struct {
ClusterId types.String `tfsdk:"cluster_id" tf:"optional,computed"`
Name types.String `tfsdk:"cluster_name" tf:"optional,computed"`
ClusterInfo []compute_tf.ClusterDetails `tfsdk:"cluster_info" tf:"optional,computed"`
ClusterId types.String `tfsdk:"cluster_id" tf:"optional,computed"`
Name types.String `tfsdk:"cluster_name" tf:"optional,computed"`
ClusterInfo types.List `tfsdk:"cluster_info" tf:"optional,computed"`
}

// GetComplexFieldTypes returns a map from the name of each complex
// (non-primitive) field of ClusterInfo to the reflect.Type of that
// field's element.
func (ClusterInfo) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
	complexFields := map[string]reflect.Type{
		"cluster_info": reflect.TypeOf(compute_tf.ClusterDetails{}),
	}
	return complexFields
}

// ToObjectType returns the representation of ClusterInfo in the Terraform
// plugin framework type system.
func (ClusterInfo) ToObjectType(ctx context.Context) types.ObjectType {
	attrTypes := map[string]attr.Type{
		"cluster_id":   types.StringType,
		"cluster_name": types.StringType,
		"cluster_info": types.ListType{ElemType: compute_tf.ClusterDetails{}.ToObjectType(ctx)},
	}
	return types.ObjectType{AttrTypes: attrTypes}
}

func (d *ClusterDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
Expand Down Expand Up @@ -84,52 +102,43 @@ func (d *ClusterDataSource) Read(ctx context.Context, req datasource.ReadRequest
}
clusterName := clusterInfo.Name.ValueString()
clusterId := clusterInfo.ClusterId.ValueString()
c, diag := d.getClusterDetails(ctx, w, clusterName, clusterId)
resp.Diagnostics.Append(diag...)
if resp.Diagnostics.HasError() {
return
}
cc := []compute_tf.ClusterDetails{}
for _, cluster := range c {
var tfCluster compute_tf.ClusterDetails
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, cluster, &tfCluster)...)
cc = append(cc, tfCluster)
}
resp.Diagnostics.Append(validateClustersList(ctx, cc, clusterName)...)
clusterInfo.ClusterId = cc[0].ClusterId
clusterInfo.Name = cc[0].ClusterName
resp.Diagnostics.Append(resp.State.Set(ctx, clusterInfo)...)
}

func (d *ClusterDataSource) getClusterDetails(ctx context.Context, w *databricks.WorkspaceClient, clusterName, clusterId string) (c []compute.ClusterDetails, dd diag.Diagnostics) {
if clusterName != "" {
clustersGoSDk, err := w.Clusters.ListAll(ctx, compute.ListClustersRequest{})
var err error
c, err = w.Clusters.ListAll(ctx, compute.ListClustersRequest{})
if err != nil {
resp.Diagnostics.AddError("failed to list clusters", err.Error())
return
}
var clustersTfSDK []compute_tf.ClusterDetails
for _, cluster := range clustersGoSDk {
var clusterDetails compute_tf.ClusterDetails
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, cluster, &clusterDetails)...)
if resp.Diagnostics.HasError() {
return
}
clustersTfSDK = append(clustersTfSDK, clusterDetails)
}
namedClusters := []compute_tf.ClusterDetails{}
for _, cluster := range clustersTfSDK {
if cluster.ClusterName == clusterInfo.Name {
namedClusters = append(namedClusters, cluster)
}
}
resp.Diagnostics.Append(validateClustersList(ctx, namedClusters, clusterName)...)
if resp.Diagnostics.HasError() {
dd.AddError("failed to list clusters", err.Error())
return
}
clusterInfo.ClusterInfo = namedClusters[0:1]
} else if clusterId != "" {
return
}
if clusterId != "" {
cluster, err := w.Clusters.GetByClusterId(ctx, clusterId)
if err != nil {
if apierr.IsMissing(err) {
resp.State.RemoveResource(ctx)
}
resp.Diagnostics.AddError(fmt.Sprintf("failed to get cluster with cluster id: %s", clusterId), err.Error())
return
}
var clusterDetails compute_tf.ClusterDetails
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, cluster, &clusterDetails)...)
if resp.Diagnostics.HasError() {
dd.AddError(fmt.Sprintf("failed to get cluster with cluster id: %s", clusterId), err.Error())
return
}
clusterInfo.ClusterInfo = []compute_tf.ClusterDetails{clusterDetails}
} else {
resp.Diagnostics.AddError("you need to specify either `cluster_name` or `cluster_id`", "")
c = []compute.ClusterDetails{*cluster}
return
}
clusterInfo.ClusterId = clusterInfo.ClusterInfo[0].ClusterId
clusterInfo.Name = clusterInfo.ClusterInfo[0].ClusterName
resp.Diagnostics.Append(resp.State.Set(ctx, clusterInfo)...)

dd.AddError("you need to specify either `cluster_name` or `cluster_id`", "")
return
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package notificationdestinations
import (
"context"
"fmt"
"reflect"
"slices"
"strings"

Expand All @@ -13,6 +14,7 @@ import (
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
"github.com/databricks/terraform-provider-databricks/internal/service/settings_tf"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
Expand All @@ -32,9 +34,27 @@ type NotificationDestinationsDataSource struct {
}

type NotificationDestinationsInfo struct {
DisplayNameContains types.String `tfsdk:"display_name_contains" tf:"optional"`
Type types.String `tfsdk:"type" tf:"optional"`
NotificationDestinations []settings_tf.ListNotificationDestinationsResult `tfsdk:"notification_destinations" tf:"computed"`
DisplayNameContains types.String `tfsdk:"display_name_contains" tf:"optional"`
Type types.String `tfsdk:"type" tf:"optional"`
NotificationDestinations types.List `tfsdk:"notification_destinations" tf:"computed"`
}

// GetComplexFieldTypes returns a map from the name of each complex
// (non-primitive) field of NotificationDestinationsInfo to the reflect.Type
// of that field's element.
func (NotificationDestinationsInfo) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
	complexFields := map[string]reflect.Type{
		"notification_destinations": reflect.TypeOf(settings_tf.ListNotificationDestinationsResult{}),
	}
	return complexFields
}

// ToObjectType returns the representation of NotificationDestinationsInfo in
// the Terraform plugin framework type system.
func (NotificationDestinationsInfo) ToObjectType(ctx context.Context) types.ObjectType {
	attrTypes := map[string]attr.Type{
		"display_name_contains": types.StringType,
		"type":                  types.StringType,
		"notification_destinations": types.ListType{
			ElemType: settings_tf.ListNotificationDestinationsResult{}.ToObjectType(ctx),
		},
	}
	return types.ObjectType{AttrTypes: attrTypes}
}

func (d *NotificationDestinationsDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
Expand Down Expand Up @@ -114,7 +134,13 @@ func (d *NotificationDestinationsDataSource) Read(ctx context.Context, req datas
notificationsTfSdk = append(notificationsTfSdk, notificationDestination)
}

notificationInfo.NotificationDestinations = notificationsTfSdk
var dd diag.Diagnostics
notificationInfo.NotificationDestinations, dd = types.ListValueFrom(ctx, settings_tf.ListNotificationDestinationsResult{}.ToObjectType(ctx), notificationsTfSdk)
resp.Diagnostics.Append(dd...)
if resp.Diagnostics.HasError() {
return
}

resp.Diagnostics.Append(resp.State.Set(ctx, notificationInfo)...)

}
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ func (m MonitorInfoExtended) GetComplexFieldTypes(ctx context.Context) map[strin
}

func (m MonitorInfoExtended) ToObjectType(ctx context.Context) types.ObjectType {
tpe := m.MonitorInfo.ToAttrType(ctx)
tpe := m.MonitorInfo.ToObjectType(ctx)
tpe.AttrTypes["warehouse_id"] = types.StringType
tpe.AttrTypes["skip_builtin_dashboard"] = types.BoolType
tpe.AttrTypes["id"] = types.StringType
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package registered_model
import (
"context"
"fmt"
"reflect"

"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/catalog"
Expand All @@ -12,6 +13,7 @@ import (
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
"github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
Expand All @@ -32,10 +34,27 @@ type RegisteredModelDataSource struct {
}

type RegisteredModelData struct {
FullName types.String `tfsdk:"full_name"`
IncludeAliases types.Bool `tfsdk:"include_aliases" tf:"optional"`
IncludeBrowse types.Bool `tfsdk:"include_browse" tf:"optional"`
ModelInfo []catalog_tf.RegisteredModelInfo `tfsdk:"model_info" tf:"optional,computed"`
FullName types.String `tfsdk:"full_name"`
IncludeAliases types.Bool `tfsdk:"include_aliases" tf:"optional"`
IncludeBrowse types.Bool `tfsdk:"include_browse" tf:"optional"`
ModelInfo types.List `tfsdk:"model_info" tf:"optional,computed"`
}

// GetComplexFieldTypes returns a map from the name of each complex
// (non-primitive) field of RegisteredModelData to the reflect.Type of that
// field's element.
func (RegisteredModelData) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
	complexFields := map[string]reflect.Type{
		"model_info": reflect.TypeOf(catalog_tf.RegisteredModelInfo{}),
	}
	return complexFields
}

// ToObjectType returns the representation of RegisteredModelData in the
// Terraform plugin framework type system.
func (RegisteredModelData) ToObjectType(ctx context.Context) types.ObjectType {
	attrTypes := map[string]attr.Type{
		"full_name":       types.StringType,
		"include_aliases": types.BoolType,
		"include_browse":  types.BoolType,
		"model_info":      types.ListType{ElemType: catalog_tf.RegisteredModelInfo{}.ToObjectType(ctx)},
	}
	return types.ObjectType{AttrTypes: attrTypes}
}

func (d *RegisteredModelDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
Expand Down Expand Up @@ -93,6 +112,11 @@ func (d *RegisteredModelDataSource) Read(ctx context.Context, req datasource.Rea
modelInfo.Aliases, d = basetypes.NewListValueFrom(ctx, modelInfo.Aliases.ElementType(ctx), []catalog_tf.RegisteredModelAlias{})
resp.Diagnostics.Append(d...)
}
registeredModel.ModelInfo = append(registeredModel.ModelInfo, modelInfo)
var dd diag.Diagnostics
registeredModel.ModelInfo, dd = types.ListValueFrom(ctx, catalog_tf.RegisteredModelInfo{}.ToObjectType(ctx), []catalog_tf.RegisteredModelInfo{modelInfo})
resp.Diagnostics.Append(dd...)
if resp.Diagnostics.HasError() {
return
}
resp.Diagnostics.Append(resp.State.Set(ctx, registeredModel)...)
}
Loading

0 comments on commit 30398da

Please sign in to comment.