Commit

update

edwardfeng-db committed Oct 31, 2024
1 parent 5daf2ed commit d7cbde6

Showing 14 changed files with 615 additions and 246 deletions.
3 changes: 3 additions & 0 deletions docs/data-sources/volumes.md
@@ -7,6 +7,9 @@ subcategory: "Unity Catalog"

Retrieves a list of [databricks_volume](../resources/volume.md) ids (full names) that were created by Terraform or manually.

## Plugin Framework Migration
The volumes data source has been migrated from SDKv2 to the plugin framework in version 1.57. If you encounter a problem with this data source and suspect it is caused by the migration, you can fall back to SDKv2 by setting the following environment variable: `export USE_SDK_V2_DATA_SOURCES="databricks_volumes"`.

## Example Usage

Listing all volumes in a _things_ [databricks_schema](../resources/schema.md) of a _sandbox_ [databricks_catalog](../resources/catalog.md):
12 changes: 12 additions & 0 deletions docs/guides/troubleshooting.md
@@ -17,6 +17,18 @@ TF_LOG=DEBUG DATABRICKS_DEBUG_TRUNCATE_BYTES=250000 terraform apply -no-color 2>

* Open a [new GitHub issue](https://github.com/databricks/terraform-provider-databricks/issues/new/choose) providing all information described in the issue template - debug logs, your Terraform code, Terraform & plugin versions, etc.

## Plugin Framework Migration Problems
The following resources and data sources have been migrated from SDKv2 to the plugin framework. If you encounter a problem with any of them and suspect it is caused by the migration, you can fall back to SDKv2 by setting the `USE_SDK_V2_RESOURCES` and `USE_SDK_V2_DATA_SOURCES` environment variables.

Example: `export USE_SDK_V2_RESOURCES="databricks_library,databricks_quality_monitor"`

### Resources migrated
- databricks_quality_monitor
- databricks_library

### Data sources migrated
- databricks_volumes


## Typical problems

### Data resources and Authentication is not configured errors
3 changes: 3 additions & 0 deletions docs/resources/library.md
@@ -7,6 +7,9 @@ Installs a [library](https://docs.databricks.com/libraries/index.html) on [datab

-> The `databricks_library` resource will always start the associated cluster if it's not running, so make sure auto-termination is configured. It's not possible to atomically change the version of the same library without a cluster restart. Libraries are fully removed from the cluster only after a restart.

## Plugin Framework Migration
The library resource has been migrated from SDKv2 to the plugin framework. If you encounter a problem with this resource and suspect it is caused by the migration, you can fall back to SDKv2 by setting the following environment variable: `export USE_SDK_V2_RESOURCES="databricks_library"`.

## Installing library on all clusters

You can install libraries on all clusters with the help of [databricks_clusters](../data-sources/clusters.md) data resource:
3 changes: 3 additions & 0 deletions docs/resources/quality_monitor.md
@@ -7,6 +7,9 @@ This resource allows you to manage [Lakehouse Monitors](https://docs.databricks.

A `databricks_quality_monitor` is attached to a [databricks_sql_table](sql_table.md) and can be of type timeseries, snapshot or inference.

## Plugin Framework Migration
The quality monitor resource has been migrated from SDKv2 to the plugin framework. If you encounter a problem with this resource and suspect it is caused by the migration, you can fall back to SDKv2 by setting the following environment variable: `export USE_SDK_V2_RESOURCES="databricks_quality_monitor"`.

## Example Usage

```hcl
30 changes: 5 additions & 25 deletions internal/providers/pluginfw/pluginfw.go
@@ -16,15 +16,6 @@ import (
"github.com/databricks/terraform-provider-databricks/commands"
"github.com/databricks/terraform-provider-databricks/common"
providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/catalog"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/sharing"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"

"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/path"
@@ -35,34 +26,23 @@
"github.com/hashicorp/terraform-plugin-log/tflog"
)

-func GetDatabricksProviderPluginFramework() provider.Provider {
-	p := &DatabricksProviderPluginFramework{}
+func GetDatabricksProviderPluginFramework(sdkV2FallbackOptions ...SdkV2FallbackOption) provider.Provider {
+	p := &DatabricksProviderPluginFramework{sdkV2Fallbacks: sdkV2FallbackOptions}
return p
}

type DatabricksProviderPluginFramework struct {
+	sdkV2Fallbacks []SdkV2FallbackOption
}

var _ provider.Provider = (*DatabricksProviderPluginFramework)(nil)

func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []func() resource.Resource {
-	return []func() resource.Resource{
-		qualitymonitor.ResourceQualityMonitor,
-		library.ResourceLibrary,
-		sharing.ResourceShare,
-	}
+	return getPluginFrameworkResourcesToRegister(p.sdkV2Fallbacks...)
}

func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []func() datasource.DataSource {
-	return []func() datasource.DataSource{
-		cluster.DataSourceCluster,
-		volume.DataSourceVolumes,
-		registered_model.DataSourceRegisteredModel,
-		notificationdestinations.DataSourceNotificationDestinations,
-		sharing.DataSourceShare,
-		sharing.DataSourceShares,
-		catalog.DataSourceFunctions,
-	}
+	return getPluginFrameworkDataSourcesToRegister(p.sdkV2Fallbacks...)
}

func (p *DatabricksProviderPluginFramework) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) {
205 changes: 205 additions & 0 deletions internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -0,0 +1,205 @@
package pluginfw

// This file contains all of the utils for controlling the plugin framework rollout.
// For migrated resources and data sources, add them to the two lists below to have them registered with the plugin framework.
// Users can manually specify resources and data sources to use SDK V2 instead of the plugin framework by setting the USE_SDK_V2_RESOURCES and USE_SDK_V2_DATA_SOURCES environment variables.
//
// Example: USE_SDK_V2_RESOURCES="databricks_library" would force the library resource to use SDK V2 instead of the plugin framework.

import (
"context"
"os"
"slices"
"strings"

"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/catalog"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/sharing"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/resource"
)

// List of resources that have been migrated from SDK V2 to plugin framework
var migratedResources = []func() resource.Resource{
qualitymonitor.ResourceQualityMonitor,
library.ResourceLibrary,
}

// List of data sources that have been migrated from SDK V2 to plugin framework
var migratedDataSources = []func() datasource.DataSource{
volume.DataSourceVolumes,
}

// List of resources that have been onboarded to the plugin framework - not migrated from sdkv2.
var pluginFwOnlyResources = []func() resource.Resource{
// TODO Add resources here
sharing.ResourceShare, // Using the staging name (with pluginframework suffix)
}

// List of data sources that have been onboarded to the plugin framework - not migrated from sdkv2.
var pluginFwOnlyDataSources = []func() datasource.DataSource{
registered_model.DataSourceRegisteredModel,
notificationdestinations.DataSourceNotificationDestinations,
catalog.DataSourceFunctions,
// TODO: Add DataSourceCluster into migratedDataSources after fixing unit tests.
cluster.DataSourceCluster, // Using the staging name (with pluginframework suffix)
sharing.DataSourceShare, // Using the staging name (with pluginframework suffix)
sharing.DataSourceShares, // Using the staging name (with pluginframework suffix)
}

type sdkV2FallbackOptions struct {
resourceFallbacks []string
dataSourceFallbacks []string
}

// SdkV2FallbackOption is an interface for acceptance tests to specify resources / data sources that should fall back to SDK V2
type SdkV2FallbackOption interface {
Apply(*sdkV2FallbackOptions)
}

type sdkV2ResourceFallback struct {
resourceFallbacks []string
}

func (o *sdkV2ResourceFallback) Apply(options *sdkV2FallbackOptions) {
options.resourceFallbacks = o.resourceFallbacks
}

// WithSdkV2ResourceFallbacks is a helper function to specify resources that should fall back to SDK V2
func WithSdkV2ResourceFallbacks(fallbacks ...string) SdkV2FallbackOption {
return &sdkV2ResourceFallback{resourceFallbacks: fallbacks}
}

type sdkv2DataSourceFallback struct {
dataSourceFallbacks []string
}

func (o *sdkv2DataSourceFallback) Apply(options *sdkV2FallbackOptions) {
options.dataSourceFallbacks = o.dataSourceFallbacks
}

// WithSdkV2DataSourceFallbacks is a helper function to specify data sources that should fall back to SDK V2
func WithSdkV2DataSourceFallbacks(fallbacks []string) SdkV2FallbackOption {
return &sdkv2DataSourceFallback{dataSourceFallbacks: fallbacks}
}

// getUseSdkV2Resources is a helper function to get the names of resources that should use SDK V2 instead of the plugin framework
func getUseSdkV2Resources() []string {
useSdkV2 := os.Getenv("USE_SDK_V2_RESOURCES")
if useSdkV2 == "" {
return []string{}
}
return strings.Split(useSdkV2, ",")
}

// getUseSdkV2DataSources is a helper function to get the names of data sources that should use SDK V2 instead of the plugin framework
func getUseSdkV2DataSources() []string {
useSdkV2 := os.Getenv("USE_SDK_V2_DATA_SOURCES")
if useSdkV2 == "" {
return []string{}
}
return strings.Split(useSdkV2, ",")
}

// Helper function to check if a resource should use SDK V2 instead of the plugin framework
func shouldUseSdkV2Resource(resourceName string) bool {
useSdkV2Resources := getUseSdkV2Resources()
return slices.Contains(useSdkV2Resources, resourceName)
}

// Helper function to check if a data source should use SDK V2 instead of the plugin framework
func shouldUseSdkV2DataSource(dataSourceName string) bool {
sdkV2DataSources := getUseSdkV2DataSources()
return slices.Contains(sdkV2DataSources, dataSourceName)
}

// getPluginFrameworkResourcesToRegister returns the resources to register with the plugin framework: migrated resources that are not configured to fall back to SDK V2, plus the plugin-framework-only resources
func getPluginFrameworkResourcesToRegister(sdkV2Fallbacks ...SdkV2FallbackOption) []func() resource.Resource {
fallbackOption := sdkV2FallbackOptions{}
for _, o := range sdkV2Fallbacks {
o.Apply(&fallbackOption)
}

var resources []func() resource.Resource

// Loop through the migrated resources and register them unless they are specifically marked to use SDK V2
for _, resourceFunc := range migratedResources {
name := getResourceName(resourceFunc)
if !shouldUseSdkV2Resource(name) && !slices.Contains(fallbackOption.resourceFallbacks, name) {
resources = append(resources, resourceFunc)
}
}

return append(resources, pluginFwOnlyResources...)
}

// getPluginFrameworkDataSourcesToRegister returns the data sources to register with the plugin framework: migrated data sources that are not configured to fall back to SDK V2, plus the plugin-framework-only data sources
func getPluginFrameworkDataSourcesToRegister(sdkV2Fallbacks ...SdkV2FallbackOption) []func() datasource.DataSource {
fallbackOption := sdkV2FallbackOptions{}
for _, o := range sdkV2Fallbacks {
o.Apply(&fallbackOption)
}

var dataSources []func() datasource.DataSource

// Loop through the migrated data sources and register them unless they are specifically marked to use SDK V2
for _, dataSourceFunc := range migratedDataSources {
name := getDataSourceName(dataSourceFunc)
if !shouldUseSdkV2DataSource(name) && !slices.Contains(fallbackOption.dataSourceFallbacks, name) {
dataSources = append(dataSources, dataSourceFunc)
}
}

return append(dataSources, pluginFwOnlyDataSources...)
}

func getResourceName(resourceFunc func() resource.Resource) string {
resp := resource.MetadataResponse{}
resourceFunc().Metadata(context.Background(), resource.MetadataRequest{ProviderTypeName: "databricks"}, &resp)
return resp.TypeName
}

func getDataSourceName(dataSourceFunc func() datasource.DataSource) string {
resp := datasource.MetadataResponse{}
dataSourceFunc().Metadata(context.Background(), datasource.MetadataRequest{ProviderTypeName: "databricks"}, &resp)
return resp.TypeName
}

// GetSdkV2ResourcesToRemove returns the names of migrated resources that should be removed from the SDK V2 provider because they are now served by the plugin framework
func GetSdkV2ResourcesToRemove(sdkV2Fallbacks ...SdkV2FallbackOption) []string {
fallbackOption := sdkV2FallbackOptions{}
for _, o := range sdkV2Fallbacks {
o.Apply(&fallbackOption)
}

resourcesToRemove := []string{}
for _, resourceFunc := range migratedResources {
name := getResourceName(resourceFunc)
if !shouldUseSdkV2Resource(name) && !slices.Contains(fallbackOption.resourceFallbacks, name) {
resourcesToRemove = append(resourcesToRemove, name)
}
}
return resourcesToRemove
}

// GetSdkV2DataSourcesToRemove returns the names of migrated data sources that should be removed from the SDK V2 provider because they are now served by the plugin framework
func GetSdkV2DataSourcesToRemove(sdkV2Fallbacks ...SdkV2FallbackOption) []string {
fallbackOption := sdkV2FallbackOptions{}
for _, o := range sdkV2Fallbacks {
o.Apply(&fallbackOption)
}

dataSourcesToRemove := []string{}
for _, dataSourceFunc := range migratedDataSources {
name := getDataSourceName(dataSourceFunc)
if !shouldUseSdkV2DataSource(name) && !slices.Contains(fallbackOption.dataSourceFallbacks, name) {
dataSourcesToRemove = append(dataSourcesToRemove, name)
}
}
return dataSourcesToRemove
}
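
To show how these options are meant to be consumed, here is a minimal sketch of an acceptance-test helper. The `GetDatabricksProviderPluginFramework`, `WithSdkV2ResourceFallbacks`, and `WithSdkV2DataSourceFallbacks` signatures are taken from the files in this commit; the `pluginfw_test` package and the `newProviderWithFallbacks` name are assumptions for illustration only.

```go
package pluginfw_test

import (
	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw"
	"github.com/hashicorp/terraform-plugin-framework/provider"
)

// newProviderWithFallbacks is a hypothetical helper: it builds the plugin
// framework provider with the databricks_library resource and the
// databricks_volumes data source forced back to SDK V2, mirroring what the
// USE_SDK_V2_RESOURCES / USE_SDK_V2_DATA_SOURCES environment variables do.
func newProviderWithFallbacks() provider.Provider {
	return pluginfw.GetDatabricksProviderPluginFramework(
		// Resource fallbacks are passed as variadic strings ...
		pluginfw.WithSdkV2ResourceFallbacks("databricks_library"),
		// ... while data source fallbacks take a string slice.
		pluginfw.WithSdkV2DataSourceFallbacks([]string{"databricks_volumes"}),
	)
}
```

The SDK V2 provider would presumably be built with the matching `GetSdkV2ResourcesToRemove` / `GetSdkV2DataSourcesToRemove` calls so that the two providers never register the same type name.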
@@ -62,14 +62,15 @@ func readLibrary(ctx context.Context, w *databricks.WorkspaceClient, waitParams
type LibraryExtended struct {
compute_tf.Library
ClusterId types.String `tfsdk:"cluster_id"`
+	ID types.String `tfsdk:"id" tf:"optional,computed"` // Adding ID field to stay compatible with SDKv2
}

type LibraryResource struct {
Client *common.DatabricksClient
}

func (r *LibraryResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(resourceName)
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
}

func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
@@ -139,6 +140,8 @@ func (r *LibraryResource) Create(ctx context.Context, req resource.CreateRequest

resp.Diagnostics.Append(readLibrary(ctx, w, waitParams, libraryRep, &installedLib)...)

+	installedLib.ID = types.StringValue(libGoSDK.String())

if resp.Diagnostics.HasError() {
return
}