Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Internal][Draft] Plugin Framework Rollout #4101

Draft
wants to merge 3 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 2 additions & 14 deletions internal/providers/pluginfw/pluginfw.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,6 @@ import (
"github.com/databricks/terraform-provider-databricks/commands"
"github.com/databricks/terraform-provider-databricks/common"
providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"

"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/diag"
Expand All @@ -43,18 +38,11 @@ type DatabricksProviderPluginFramework struct {
var _ provider.Provider = (*DatabricksProviderPluginFramework)(nil)

// Resources returns the constructors for all resources served by the plugin
// framework provider. The set is computed by the rollout helpers, which honor
// the USE_SDK_V2_RESOURCES environment-variable override.
func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []func() resource.Resource {
	return getPluginFrameworkResourcesToRegister()
}

// DataSources returns the constructors for all data sources served by the
// plugin framework provider. The set is computed by the rollout helpers, which
// honor the USE_SDK_V2_DATA_SOURCES environment-variable override.
func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []func() datasource.DataSource {
	return getPluginFrameworkDataSourcesToRegister()
}

func (p *DatabricksProviderPluginFramework) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) {
Expand Down
123 changes: 123 additions & 0 deletions internal/providers/pluginfw/pluginfw_rollout_utils.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
package pluginfw

// This file contains all of the utils for controlling the plugin framework rollout.
// For migrated resources and data sources, we can add them to the two maps below to have them registered with the plugin framework.
// Users can manually specify resources and data sources to use SDK V2 instead of the plugin framework by setting the USE_SDK_V2_RESOURCES and USE_SDK_V2_DATA_SOURCES environment variables.
//
// Example: USE_SDK_V2_RESOURCES="databricks_library" would force the library resource to use SDK V2 instead of the plugin framework.

import (
"os"
"strings"

"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/resource"
)

// Map of resources that have been migrated from SDK V2 to plugin framework
var migratedResourceMap = map[string]func() resource.Resource{
"databricks_qualitymonitor": qualitymonitor.ResourceQualityMonitor,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Instead of hard-coding the names here, we can get them by calling the Metadata() method for each resource/data source.

"databricks_library": library.ResourceLibrary,
}

// Map of data sources that have been migrated from SDK V2 to plugin framework
var migratedDataSourceMap = map[string]func() datasource.DataSource{
"databricks_cluster": cluster.DataSourceCluster,
"databricks_volumes": volume.DataSourceVolumes,
"databricks_registered_model": registered_model.DataSourceRegisteredModel,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was not migrated IIUC. It should only exist in the plugin framework.

}

// getUseSdkV2Resources returns the names of resources that should use SDK V2
// instead of the plugin framework, read from the comma-separated
// USE_SDK_V2_RESOURCES environment variable.
// (The original doc comment referred to the data-source helper by mistake.)
func getUseSdkV2Resources() []string {
	raw := os.Getenv("USE_SDK_V2_RESOURCES")
	if raw == "" {
		return []string{}
	}
	names := strings.Split(raw, ",")
	for i, name := range names {
		// Tolerate whitespace around entries, e.g. "a, b".
		names[i] = strings.TrimSpace(name)
	}
	return names
}

// getUseSdkV2DataSources returns the names of data sources that should use SDK
// V2 instead of the plugin framework, read from the comma-separated
// USE_SDK_V2_DATA_SOURCES environment variable.
func getUseSdkV2DataSources() []string {
	raw := os.Getenv("USE_SDK_V2_DATA_SOURCES")
	if raw == "" {
		return []string{}
	}
	names := strings.Split(raw, ",")
	for i, name := range names {
		// Tolerate whitespace around entries, e.g. "a, b".
		names[i] = strings.TrimSpace(name)
	}
	return names
}

// shouldUseSdkV2Resource reports whether the named resource has been explicitly
// opted back into SDK V2 via the USE_SDK_V2_RESOURCES environment variable.
func shouldUseSdkV2Resource(resourceName string) bool {
	for _, overridden := range getUseSdkV2Resources() {
		if overridden == resourceName {
			return true
		}
	}
	return false
}

// shouldUseSdkV2DataSource reports whether the named data source has been
// explicitly opted back into SDK V2 via the USE_SDK_V2_DATA_SOURCES
// environment variable.
func shouldUseSdkV2DataSource(dataSourceName string) bool {
	for _, overridden := range getUseSdkV2DataSources() {
		if overridden == dataSourceName {
			return true
		}
	}
	return false
}

// getPluginFrameworkResourcesToRegister returns the constructors of all
// migrated resources that should be registered with the plugin framework,
// skipping any the user has forced back onto SDK V2.
func getPluginFrameworkResourcesToRegister() []func() resource.Resource {
	var toRegister []func() resource.Resource
	for name, constructor := range migratedResourceMap {
		if shouldUseSdkV2Resource(name) {
			continue // user forced this resource back onto SDK V2
		}
		toRegister = append(toRegister, constructor)
	}
	return toRegister
}

// getPluginFrameworkDataSourcesToRegister returns the constructors of all
// migrated data sources that should be registered with the plugin framework,
// skipping any the user has forced back onto SDK V2.
func getPluginFrameworkDataSourcesToRegister() []func() datasource.DataSource {
	var toRegister []func() datasource.DataSource
	for name, constructor := range migratedDataSourceMap {
		if shouldUseSdkV2DataSource(name) {
			continue // user forced this data source back onto SDK V2
		}
		toRegister = append(toRegister, constructor)
	}
	return toRegister
}

// GetSdkV2ResourcesToRemove returns the names of resources that must be
// removed from the SDK V2 provider because the plugin framework now serves
// them (i.e. migrated resources with no user override back to SDK V2).
func GetSdkV2ResourcesToRemove() []string {
	toRemove := []string{}
	for name := range migratedResourceMap {
		if shouldUseSdkV2Resource(name) {
			continue
		}
		toRemove = append(toRemove, name)
	}
	return toRemove
}

// GetSdkV2DataSourcesToRemove returns the names of data sources that must be
// removed from the SDK V2 provider because the plugin framework now serves
// them (i.e. migrated data sources with no user override back to SDK V2).
func GetSdkV2DataSourcesToRemove() []string {
	toRemove := []string{}
	for name := range migratedDataSourceMap {
		if shouldUseSdkV2DataSource(name) {
			continue
		}
		toRemove = append(toRemove, name)
	}
	return toRemove
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ type ClusterInfo struct {
}

// Metadata sets the data source type name. The rollout switches from the
// staging ("..._pluginframework") name to the production name.
func (d *ClusterDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
}

func (d *ClusterDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import (
)

// dataClusterTemplateById is an HCL snippet fetching a cluster by ID.
// Fixed: the new line in the diff was missing the closing quote on the data
// source type (`data "databricks_cluster "by_id"`), which is invalid HCL.
const dataClusterTemplateById = `
data "databricks_cluster" "by_id" {
	cluster_id = "{env.TEST_DEFAULT_CLUSTER_ID}"
}
`
Expand All @@ -21,8 +21,8 @@ func TestAccDataSourceClusterByID(t *testing.T) {
// TestAccDataSourceClusterByName fetches a cluster by ID, then looks the same
// cluster up again by name, verifying name-based resolution works.
func TestAccDataSourceClusterByName(t *testing.T) {
	acceptance.WorkspaceLevel(t, acceptance.Step{
		Template: dataClusterTemplateById + `
		data "databricks_cluster" "by_name" {
			cluster_name = data.databricks_cluster.by_id.cluster_name
		}`,
	})
}
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ type LibraryResource struct {
}

// Metadata sets the resource type name. The rollout switches from the staging
// ("..._pluginframework") name to the production name.
func (r *LibraryResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
}

func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ func TestAccLibraryCreationPluginFramework(t *testing.T) {
"ResourceClass" = "SingleNode"
}
}
resource "databricks_library_pluginframework" "new_library" {
resource "databricks_library" "new_library" {
cluster_id = databricks_cluster.this.id
pypi {
repo = "https://pypi.org/dummy"
Expand Down Expand Up @@ -54,7 +54,7 @@ func TestAccLibraryUpdatePluginFramework(t *testing.T) {
"ResourceClass" = "SingleNode"
}
}
resource "databricks_library_pluginframework" "new_library" {
resource "databricks_library" "new_library" {
cluster_id = databricks_cluster.this.id
pypi {
repo = "https://pypi.org/simple"
Expand All @@ -80,7 +80,7 @@ func TestAccLibraryUpdatePluginFramework(t *testing.T) {
"ResourceClass" = "SingleNode"
}
}
resource "databricks_library_pluginframework" "new_library" {
resource "databricks_library" "new_library" {
cluster_id = databricks_cluster.this.id
pypi {
package = "networkx"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ type QualityMonitorResource struct {
}

// Metadata sets the resource type name. The rollout switches from the staging
// ("..._pluginframework") name to the production name.
func (r *QualityMonitorResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
}

func (r *QualityMonitorResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ func TestUcAccQualityMonitor(t *testing.T) {
acceptance.UnityWorkspaceLevel(t, acceptance.Step{
Template: commonPartQualityMonitoring + `

resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
resource "databricks_quality_monitor" "testMonitorInference" {
table_name = databricks_sql_table.myInferenceTable.id
assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
output_schema_name = databricks_schema.things.id
Expand All @@ -81,7 +81,7 @@ func TestUcAccQualityMonitor(t *testing.T) {
}
}

resource "databricks_quality_monitor_pluginframework" "testMonitorTimeseries" {
resource "databricks_quality_monitor" "testMonitorTimeseries" {
table_name = databricks_sql_table.myTimeseries.id
assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myTimeseries.name}"
output_schema_name = databricks_schema.things.id
Expand All @@ -104,7 +104,7 @@ func TestUcAccQualityMonitor(t *testing.T) {
}
}

resource "databricks_quality_monitor_pluginframework" "testMonitorSnapshot" {
resource "databricks_quality_monitor" "testMonitorSnapshot" {
table_name = databricks_sql_table.mySnapshot.id
assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myTimeseries.name}"
output_schema_name = databricks_schema.things.id
Expand All @@ -121,7 +121,7 @@ func TestUcAccUpdateQualityMonitor(t *testing.T) {
}
acceptance.UnityWorkspaceLevel(t, acceptance.Step{
Template: commonPartQualityMonitoring + `
resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
resource "databricks_quality_monitor" "testMonitorInference" {
table_name = databricks_sql_table.myInferenceTable.id
assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
output_schema_name = databricks_schema.things.id
Expand All @@ -136,7 +136,7 @@ func TestUcAccUpdateQualityMonitor(t *testing.T) {
`,
}, acceptance.Step{
Template: commonPartQualityMonitoring + `
resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
resource "databricks_quality_monitor" "testMonitorInference" {
table_name = databricks_sql_table.myInferenceTable.id
assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
output_schema_name = databricks_schema.things.id
Expand All @@ -160,7 +160,7 @@ func TestUcAccQualityMonitorImportPluginFramework(t *testing.T) {
acceptance.Step{
Template: commonPartQualityMonitoring + `

resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
resource "databricks_quality_monitor" "testMonitorInference" {
table_name = databricks_sql_table.myInferenceTable.id
assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
output_schema_name = databricks_schema.things.id
Expand All @@ -176,8 +176,8 @@ func TestUcAccQualityMonitorImportPluginFramework(t *testing.T) {
},
acceptance.Step{
ImportState: true,
ResourceName: "databricks_quality_monitor_pluginframework.testMonitorInference",
ImportStateIdFunc: acceptance.BuildImportStateIdFunc("databricks_quality_monitor_pluginframework.testMonitorInference", "table_name"),
ResourceName: "databricks_quality_monitor.testMonitorInference",
ImportStateIdFunc: acceptance.BuildImportStateIdFunc("databricks_quality_monitor.testMonitorInference", "table_name"),
ImportStateVerify: true,
ImportStateVerifyIdentifierAttribute: "table_name",
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ type VolumesList struct {
}

func (d *VolumesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceName)
edwardfeng-db marked this conversation as resolved.
Show resolved Hide resolved
resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
edwardfeng-db marked this conversation as resolved.
Show resolved Hide resolved
}

func (d *VolumesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ import (

func checkDataSourceVolumesPopulated(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
_, ok := s.Modules[0].Resources["data.databricks_volumes_pluginframework.this"]
require.True(t, ok, "data.databricks_volumes_pluginframework.this has to be there")
_, ok := s.Modules[0].Resources["data.databricks_volumes.this"]
require.True(t, ok, "data.databricks_volumes.this has to be there")
num_volumes, _ := strconv.Atoi(s.Modules[0].Outputs["volumes"].Value.(string))
assert.GreaterOrEqual(t, num_volumes, 1)
return nil
Expand Down Expand Up @@ -45,13 +45,13 @@ func TestUcAccDataSourceVolumes(t *testing.T) {
schema_name = databricks_schema.things.name
volume_type = "MANAGED"
}
data "databricks_volumes_pluginframework" "this" {
data "databricks_volumes" "this" {
catalog_name = databricks_catalog.sandbox.name
schema_name = databricks_schema.things.name
depends_on = [ databricks_volume.this ]
}
output "volumes" {
value = length(data.databricks_volumes_pluginframework.this.ids)
value = length(data.databricks_volumes.this.ids)
}
`,
Check: checkDataSourceVolumesPopulated(t),
Expand Down
Loading
Loading