From 5aa1320382fdb8a88326aa8d03c80c708a59f76d Mon Sep 17 00:00:00 2001 From: edwardfeng-db Date: Mon, 21 Oct 2024 20:52:02 -0700 Subject: [PATCH 1/4] update --- internal/providers/pluginfw/pluginfw.go | 12 +- .../pluginfw/pluginfw_rollout_utils.go | 148 +++++++++++++++ .../resources/cluster/data_cluster.go | 2 +- .../cluster/data_cluster_acc_test.go | 6 +- .../resources/library/resource_library.go | 2 +- .../library/resource_library_acc_test.go | 6 +- .../resource_quality_monitor.go | 2 +- .../resource_quality_monitor_acc_test.go | 16 +- .../pluginfw/resources/volume/data_volumes.go | 2 +- .../resources/volume/data_volumes_acc_test.go | 8 +- internal/providers/sdkv2/sdkv2.go | 168 ++++++++++++++++++ 11 files changed, 346 insertions(+), 26 deletions(-) create mode 100644 internal/providers/pluginfw/pluginfw_rollout_utils.go diff --git a/internal/providers/pluginfw/pluginfw.go b/internal/providers/pluginfw/pluginfw.go index db811d5ae2..38e26d7d70 100644 --- a/internal/providers/pluginfw/pluginfw.go +++ b/internal/providers/pluginfw/pluginfw.go @@ -16,12 +16,15 @@ import ( "github.com/databricks/terraform-provider-databricks/commands" "github.com/databricks/terraform-provider-databricks/common" providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common" +<<<<<<< Updated upstream "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume" +======= +>>>>>>> Stashed changes "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/diag" @@ -44,19 +47,20 @@ type DatabricksProviderPluginFramework struct { var _ provider.Provider = (*DatabricksProviderPluginFramework)(nil) func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []func() resource.Resource { - return []func() resource.Resource{ - qualitymonitor.ResourceQualityMonitor, - library.ResourceLibrary, - } + return getPluginFrameworkResourcesToRegister() } func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []func() datasource.DataSource { +<<<<<<< Updated upstream return []func() datasource.DataSource{ cluster.DataSourceCluster, volume.DataSourceVolumes, registered_model.DataSourceRegisteredModel, notificationdestinations.DataSourceNotificationDestinations, } +======= + return getPluginFrameworkDataSourcesToRegister() +>>>>>>> Stashed changes } func (p *DatabricksProviderPluginFramework) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) { diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go new file mode 100644 index 0000000000..535d5b565a --- /dev/null +++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go @@ -0,0 +1,148 @@ +package pluginfw + +// This file contains all of the utils for controlling the plugin framework rollout. 
+// For migrated resources and data sources, we can add them to the lists below to have them registered with the plugin framework.
+// Users can manually specify resources and data sources to use SDK V2 instead of the plugin framework by setting the USE_SDK_V2_RESOURCES and USE_SDK_V2_DATA_SOURCES environment variables.
+//
+// Example: USE_SDK_V2_RESOURCES="databricks_library" would force the library resource to use SDK V2 instead of the plugin framework.
+
+import (
+	"context"
+	"os"
+	"strings"
+
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
+// List of resources that have been migrated from SDK V2 to plugin framework
+var migratedResources = []func() resource.Resource{
+	qualitymonitor.ResourceQualityMonitor,
+	library.ResourceLibrary,
+}
+
+// List of data sources that have been migrated from SDK V2 to plugin framework
+var migratedDataSources = []func() datasource.DataSource{
+	// cluster.DataSourceCluster,
+	volume.DataSourceVolumes,
+}
+
+// List of resources that have been onboarded to the plugin framework - not migrated from sdkv2.
+var onboardedResources = []func() resource.Resource{
+	// TODO Add resources here
+}
+
+// List of data sources that have been onboarded to the plugin framework - not migrated from sdkv2.
+var onboardedDataSources = []func() datasource.DataSource{
+	registered_model.DataSourceRegisteredModel,
+}
+
+// getUseSdkV2Resources is a helper function to get the names of resources that should use SDK V2 instead of the plugin framework
+func getUseSdkV2Resources() []string {
+	useSdkV2 := os.Getenv("USE_SDK_V2_RESOURCES")
+	if useSdkV2 == "" {
+		return []string{}
+	}
+	return strings.Split(useSdkV2, ",")
+}
+
+// getUseSdkV2DataSources is a helper function to get the names of data sources that should use SDK V2 instead of the plugin framework
+func getUseSdkV2DataSources() []string {
+	useSdkV2 := os.Getenv("USE_SDK_V2_DATA_SOURCES")
+	if useSdkV2 == "" {
+		return []string{}
+	}
+	return strings.Split(useSdkV2, ",")
+}
+
+// Helper function to check if a resource should be handled by SDK V2 instead of the plugin framework
+func shouldUseSdkV2Resource(resourceName string) bool {
+	useSdkV2Resources := getUseSdkV2Resources()
+	for _, sdkV2Resource := range useSdkV2Resources {
+		if resourceName == sdkV2Resource {
+			return true
+		}
+	}
+	return false
+}
+
+// Helper function to check if a data source should be handled by SDK V2 instead of the plugin framework
+func shouldUseSdkV2DataSource(dataSourceName string) bool {
+	sdkV2DataSources := getUseSdkV2DataSources()
+	for _, sdkV2DataSource := range sdkV2DataSources {
+		if dataSourceName == sdkV2DataSource {
+			return true
+		}
+	}
+	return false
+}
+
+// getPluginFrameworkResourcesToRegister is a helper function to get the list of resources that are migrated away from sdkv2 to plugin framework
+func getPluginFrameworkResourcesToRegister() []func() resource.Resource {
+	var resources []func() resource.Resource
+
+	// Loop through the list and add resources if they're not specifically marked to use the SDK V2
+	for _, resourceFunc := range migratedResources {
+		name := getResourceName(resourceFunc)
+		if !shouldUseSdkV2Resource(name) {
+			resources = append(resources, resourceFunc)
+		}
+	}
+
+	return append(resources, onboardedResources...)
+}
+
+// getPluginFrameworkDataSourcesToRegister is a helper function to get the list of data sources that are migrated away from sdkv2 to plugin framework
+func getPluginFrameworkDataSourcesToRegister() []func() datasource.DataSource {
+	var dataSources []func() datasource.DataSource
+
+	// Loop through the list and add data sources if they're not specifically marked to use the SDK V2
+	for _, dataSourceFunc := range migratedDataSources {
+		name := getDataSourceName(dataSourceFunc)
+		if !shouldUseSdkV2DataSource(name) {
+			dataSources = append(dataSources, dataSourceFunc)
+		}
+	}
+
+	return append(dataSources, onboardedDataSources...)
+}
+
+func getResourceName(resourceFunc func() resource.Resource) string {
+	resp := resource.MetadataResponse{}
+	resourceFunc().Metadata(context.Background(), resource.MetadataRequest{ProviderTypeName: "databricks"}, &resp)
+	return resp.TypeName
+}
+
+func getDataSourceName(dataSourceFunc func() datasource.DataSource) string {
+	resp := datasource.MetadataResponse{}
+	dataSourceFunc().Metadata(context.Background(), datasource.MetadataRequest{ProviderTypeName: "databricks"}, &resp)
+	return resp.TypeName
+}
+
+// GetSdkV2ResourcesToRemove is a helper function to get the list of resources that are migrated away from sdkv2 to plugin framework
+func GetSdkV2ResourcesToRemove() []string {
+	resourcesToRemove := []string{}
+	for _, resourceFunc := range migratedResources {
+		name := getResourceName(resourceFunc)
+		if !shouldUseSdkV2Resource(name) {
+			resourcesToRemove = append(resourcesToRemove, name)
+		}
+	}
+	return resourcesToRemove
+}
+
+// GetSdkV2DataSourcesToRemove is a helper function to get the list of data sources that are migrated away from sdkv2 to plugin framework
+func GetSdkV2DataSourcesToRemove() []string {
+	dataSourcesToRemove := []string{}
+	for _, dataSourceFunc := range migratedDataSources {
+		name := getDataSourceName(dataSourceFunc)
+		if !shouldUseSdkV2DataSource(name) {
+			dataSourcesToRemove = append(dataSourcesToRemove, name)
+		}
+	}
+	return dataSourcesToRemove
+}
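The helpers above implement the whole opt-out mechanism: USE_SDK_V2_RESOURCES and USE_SDK_V2_DATA_SOURCES hold comma-separated type names, and any migrated implementation whose name appears there is skipped during plugin-framework registration and retained on the SDK V2 side. A minimal, self-contained sketch of the same filtering logic — filterMigrated and the sample names are illustrative, not the provider's actual API:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// filterMigrated drops every migrated type name that the user pinned back to
// SDK v2 via the comma-separated opt-out list in the named env var.
func filterMigrated(envVar string, migrated []string) []string {
	optOut := map[string]bool{}
	if v := os.Getenv(envVar); v != "" {
		for _, name := range strings.Split(v, ",") {
			optOut[strings.TrimSpace(name)] = true
		}
	}
	registered := []string{}
	for _, name := range migrated {
		if !optOut[name] {
			registered = append(registered, name)
		}
	}
	return registered
}

func main() {
	os.Setenv("USE_SDK_V2_RESOURCES", "databricks_library")
	fmt.Println(filterMigrated("USE_SDK_V2_RESOURCES",
		[]string{"databricks_quality_monitor", "databricks_library"}))
	// Output: [databricks_quality_monitor]
}
```

Because registration and removal are driven by the same shouldUseSdkV2* checks, each type name ends up served by exactly one of the two providers regardless of how the flags are set.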
diff --git a/internal/providers/pluginfw/resources/cluster/data_cluster.go b/internal/providers/pluginfw/resources/cluster/data_cluster.go
index 8d0499ccb8..7238e9ec8a 100644
--- a/internal/providers/pluginfw/resources/cluster/data_cluster.go
+++ b/internal/providers/pluginfw/resources/cluster/data_cluster.go
@@ -38,7 +38,7 @@ type ClusterInfo struct {
 }
 
 func (d *ClusterDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceName)
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
 }
 
 func (d *ClusterDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
diff --git a/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go b/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go
index cbac44de04..4157cad809 100644
--- a/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go
+++ b/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go
@@ -7,7 +7,7 @@ import (
 )
 
 const dataClusterTemplateById = `
-	data "databricks_cluster_pluginframework" "by_id" {
+	data "databricks_cluster" "by_id" {
 		cluster_id =
"{env.TEST_DEFAULT_CLUSTER_ID}" } ` @@ -21,8 +21,8 @@ func TestAccDataSourceClusterByID(t *testing.T) { func TestAccDataSourceClusterByName(t *testing.T) { acceptance.WorkspaceLevel(t, acceptance.Step{ Template: dataClusterTemplateById + ` - data "databricks_cluster_pluginframework" "by_name" { - cluster_name = data.databricks_cluster_pluginframework.by_id.cluster_name + data "databricks_cluster" "by_name" { + cluster_name = data.databricks_cluster.by_id.cluster_name }`, }) } diff --git a/internal/providers/pluginfw/resources/library/resource_library.go b/internal/providers/pluginfw/resources/library/resource_library.go index 1c999bd2ed..fffc0f95e3 100644 --- a/internal/providers/pluginfw/resources/library/resource_library.go +++ b/internal/providers/pluginfw/resources/library/resource_library.go @@ -69,7 +69,7 @@ type LibraryResource struct { } func (r *LibraryResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksStagingName(resourceName) + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName) } func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { diff --git a/internal/providers/pluginfw/resources/library/resource_library_acc_test.go b/internal/providers/pluginfw/resources/library/resource_library_acc_test.go index 153657ae41..11d2b42c84 100644 --- a/internal/providers/pluginfw/resources/library/resource_library_acc_test.go +++ b/internal/providers/pluginfw/resources/library/resource_library_acc_test.go @@ -24,7 +24,7 @@ func TestAccLibraryCreationPluginFramework(t *testing.T) { "ResourceClass" = "SingleNode" } } - resource "databricks_library_pluginframework" "new_library" { + resource "databricks_library" "new_library" { cluster_id = databricks_cluster.this.id pypi { repo = "https://pypi.org/dummy" @@ -54,7 +54,7 @@ func TestAccLibraryUpdatePluginFramework(t *testing.T) { "ResourceClass" = "SingleNode" } } - resource "databricks_library_pluginframework" "new_library" { + resource "databricks_library" "new_library" { cluster_id = databricks_cluster.this.id pypi { repo = "https://pypi.org/simple" @@ -80,7 +80,7 @@ func TestAccLibraryUpdatePluginFramework(t *testing.T) { "ResourceClass" = "SingleNode" } } - resource "databricks_library_pluginframework" "new_library" { + resource "databricks_library" "new_library" { cluster_id = databricks_cluster.this.id pypi { package = "networkx" diff --git a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go index c0047d55cc..b20f08deed 100644 --- a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go +++ b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go @@ -65,7 +65,7 @@ type QualityMonitorResource struct { } func (r *QualityMonitorResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksStagingName(resourceName) + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName) } func (r *QualityMonitorResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { diff --git a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go index 
7f303d482e..0d0c49b2c0 100644 --- a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go +++ b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go @@ -55,7 +55,7 @@ func TestUcAccQualityMonitor(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: commonPartQualityMonitoring + ` - resource "databricks_quality_monitor_pluginframework" "testMonitorInference" { + resource "databricks_quality_monitor" "testMonitorInference" { table_name = databricks_sql_table.myInferenceTable.id assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}" output_schema_name = databricks_schema.things.id @@ -81,7 +81,7 @@ func TestUcAccQualityMonitor(t *testing.T) { } } - resource "databricks_quality_monitor_pluginframework" "testMonitorTimeseries" { + resource "databricks_quality_monitor" "testMonitorTimeseries" { table_name = databricks_sql_table.myTimeseries.id assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myTimeseries.name}" output_schema_name = databricks_schema.things.id @@ -104,7 +104,7 @@ func TestUcAccQualityMonitor(t *testing.T) { } } - resource "databricks_quality_monitor_pluginframework" "testMonitorSnapshot" { + resource "databricks_quality_monitor" "testMonitorSnapshot" { table_name = databricks_sql_table.mySnapshot.id assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myTimeseries.name}" output_schema_name = databricks_schema.things.id @@ -121,7 +121,7 @@ func TestUcAccUpdateQualityMonitor(t *testing.T) { } acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: commonPartQualityMonitoring + ` - resource "databricks_quality_monitor_pluginframework" "testMonitorInference" { + resource "databricks_quality_monitor" "testMonitorInference" { table_name = databricks_sql_table.myInferenceTable.id assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}" output_schema_name = databricks_schema.things.id @@ -136,7 +136,7 @@ func TestUcAccUpdateQualityMonitor(t *testing.T) { `, }, acceptance.Step{ Template: commonPartQualityMonitoring + ` - resource "databricks_quality_monitor_pluginframework" "testMonitorInference" { + resource "databricks_quality_monitor" "testMonitorInference" { table_name = databricks_sql_table.myInferenceTable.id assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}" output_schema_name = databricks_schema.things.id @@ -160,7 +160,7 @@ func TestUcAccQualityMonitorImportPluginFramework(t *testing.T) { acceptance.Step{ Template: commonPartQualityMonitoring + ` - resource "databricks_quality_monitor_pluginframework" "testMonitorInference" { + resource "databricks_quality_monitor" "testMonitorInference" { table_name = databricks_sql_table.myInferenceTable.id assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}" output_schema_name = databricks_schema.things.id @@ -176,8 +176,8 @@ func TestUcAccQualityMonitorImportPluginFramework(t *testing.T) { }, acceptance.Step{ ImportState: true, - ResourceName: "databricks_quality_monitor_pluginframework.testMonitorInference", - ImportStateIdFunc: acceptance.BuildImportStateIdFunc("databricks_quality_monitor_pluginframework.testMonitorInference", "table_name"), + ResourceName: "databricks_quality_monitor.testMonitorInference", + ImportStateIdFunc: 
acceptance.BuildImportStateIdFunc("databricks_quality_monitor.testMonitorInference", "table_name"), ImportStateVerify: true, ImportStateVerifyIdentifierAttribute: "table_name", }, diff --git a/internal/providers/pluginfw/resources/volume/data_volumes.go b/internal/providers/pluginfw/resources/volume/data_volumes.go index 54eccf7bde..6a4af53ba0 100644 --- a/internal/providers/pluginfw/resources/volume/data_volumes.go +++ b/internal/providers/pluginfw/resources/volume/data_volumes.go @@ -35,7 +35,7 @@ type VolumesList struct { } func (d *VolumesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceName) + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName) } func (d *VolumesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { diff --git a/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go b/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go index 0fdfc8aa50..3416d20f26 100644 --- a/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go +++ b/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go @@ -12,8 +12,8 @@ import ( func checkDataSourceVolumesPopulated(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { - _, ok := s.Modules[0].Resources["data.databricks_volumes_pluginframework.this"] - require.True(t, ok, "data.databricks_volumes_pluginframework.this has to be there") + _, ok := s.Modules[0].Resources["data.databricks_volumes.this"] + require.True(t, ok, "data.databricks_volumes.this has to be there") num_volumes, _ := strconv.Atoi(s.Modules[0].Outputs["volumes"].Value.(string)) assert.GreaterOrEqual(t, num_volumes, 1) return nil @@ -45,13 +45,13 @@ func TestUcAccDataSourceVolumes(t *testing.T) { schema_name = databricks_schema.things.name volume_type = "MANAGED" } - data "databricks_volumes_pluginframework" "this" { + data "databricks_volumes" "this" { catalog_name = databricks_catalog.sandbox.name schema_name = databricks_schema.things.name depends_on = [ databricks_volume.this ] } output "volumes" { - value = length(data.databricks_volumes_pluginframework.this.ids) + value = length(data.databricks_volumes.this.ids) } `, Check: checkDataSourceVolumesPopulated(t), diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go index 8136901ddf..134c838deb 100644 --- a/internal/providers/sdkv2/sdkv2.go +++ b/internal/providers/sdkv2/sdkv2.go @@ -32,6 +32,7 @@ import ( "github.com/databricks/terraform-provider-databricks/dashboards" "github.com/databricks/terraform-provider-databricks/finops" providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw" "github.com/databricks/terraform-provider-databricks/jobs" "github.com/databricks/terraform-provider-databricks/logger" "github.com/databricks/terraform-provider-databricks/mlflow" @@ -72,7 +73,169 @@ func init() { // DatabricksProvider returns the entire terraform provider object func DatabricksProvider() *schema.Provider { + dataSourceMap := map[string]*schema.Resource{ // must be in alphabetical order + "databricks_aws_crossaccount_policy": aws.DataAwsCrossaccountPolicy().ToResource(), + "databricks_aws_assume_role_policy": aws.DataAwsAssumeRolePolicy().ToResource(), + "databricks_aws_bucket_policy": 
aws.DataAwsBucketPolicy().ToResource(), + "databricks_aws_unity_catalog_assume_role_policy": aws.DataAwsUnityCatalogAssumeRolePolicy().ToResource(), + "databricks_aws_unity_catalog_policy": aws.DataAwsUnityCatalogPolicy().ToResource(), + "databricks_cluster": clusters.DataSourceCluster().ToResource(), + "databricks_clusters": clusters.DataSourceClusters().ToResource(), + "databricks_cluster_policy": policies.DataSourceClusterPolicy().ToResource(), + "databricks_catalog": catalog.DataSourceCatalog().ToResource(), + "databricks_catalogs": catalog.DataSourceCatalogs().ToResource(), + "databricks_current_config": mws.DataSourceCurrentConfiguration().ToResource(), + "databricks_current_metastore": catalog.DataSourceCurrentMetastore().ToResource(), + "databricks_current_user": scim.DataSourceCurrentUser().ToResource(), + "databricks_dbfs_file": storage.DataSourceDbfsFile().ToResource(), + "databricks_dbfs_file_paths": storage.DataSourceDbfsFilePaths().ToResource(), + "databricks_directory": workspace.DataSourceDirectory().ToResource(), + "databricks_external_location": catalog.DataSourceExternalLocation().ToResource(), + "databricks_external_locations": catalog.DataSourceExternalLocations().ToResource(), + "databricks_group": scim.DataSourceGroup().ToResource(), + "databricks_instance_pool": pools.DataSourceInstancePool().ToResource(), + "databricks_instance_profiles": aws.DataSourceInstanceProfiles().ToResource(), + "databricks_jobs": jobs.DataSourceJobs().ToResource(), + "databricks_job": jobs.DataSourceJob().ToResource(), + "databricks_metastore": catalog.DataSourceMetastore().ToResource(), + "databricks_metastores": catalog.DataSourceMetastores().ToResource(), + "databricks_mlflow_experiment": mlflow.DataSourceExperiment().ToResource(), + "databricks_mlflow_model": mlflow.DataSourceModel().ToResource(), + "databricks_mlflow_models": mlflow.DataSourceModels().ToResource(), + "databricks_mws_credentials": mws.DataSourceMwsCredentials().ToResource(), + "databricks_mws_workspaces": mws.DataSourceMwsWorkspaces().ToResource(), + "databricks_node_type": clusters.DataSourceNodeType().ToResource(), + "databricks_notebook": workspace.DataSourceNotebook().ToResource(), + "databricks_notebook_paths": workspace.DataSourceNotebookPaths().ToResource(), + "databricks_pipelines": pipelines.DataSourcePipelines().ToResource(), + "databricks_schema": catalog.DataSourceSchema().ToResource(), + "databricks_schemas": catalog.DataSourceSchemas().ToResource(), + "databricks_service_principal": scim.DataSourceServicePrincipal().ToResource(), + "databricks_service_principals": scim.DataSourceServicePrincipals().ToResource(), + "databricks_share": sharing.DataSourceShare().ToResource(), + "databricks_shares": sharing.DataSourceShares().ToResource(), + "databricks_spark_version": clusters.DataSourceSparkVersion().ToResource(), + "databricks_sql_warehouse": sql.DataSourceWarehouse().ToResource(), + "databricks_sql_warehouses": sql.DataSourceWarehouses().ToResource(), + "databricks_storage_credential": catalog.DataSourceStorageCredential().ToResource(), + "databricks_storage_credentials": catalog.DataSourceStorageCredentials().ToResource(), + "databricks_table": catalog.DataSourceTable().ToResource(), + "databricks_tables": catalog.DataSourceTables().ToResource(), + "databricks_views": catalog.DataSourceViews().ToResource(), + "databricks_volume": catalog.DataSourceVolume().ToResource(), + "databricks_volumes": catalog.DataSourceVolumes().ToResource(), + "databricks_user": scim.DataSourceUser().ToResource(), + 
"databricks_zones": clusters.DataSourceClusterZones().ToResource(), + } + + resourceMap := map[string]*schema.Resource{ // must be in alphabetical order + "databricks_access_control_rule_set": permissions.ResourceAccessControlRuleSet().ToResource(), + "databricks_artifact_allowlist": catalog.ResourceArtifactAllowlist().ToResource(), + "databricks_aws_s3_mount": storage.ResourceAWSS3Mount().ToResource(), + "databricks_azure_adls_gen1_mount": storage.ResourceAzureAdlsGen1Mount().ToResource(), + "databricks_azure_adls_gen2_mount": storage.ResourceAzureAdlsGen2Mount().ToResource(), + "databricks_azure_blob_mount": storage.ResourceAzureBlobMount().ToResource(), + "databricks_budget": finops.ResourceBudget().ToResource(), + "databricks_catalog": catalog.ResourceCatalog().ToResource(), + "databricks_catalog_workspace_binding": catalog.ResourceCatalogWorkspaceBinding().ToResource(), + "databricks_connection": catalog.ResourceConnection().ToResource(), + "databricks_cluster": clusters.ResourceCluster().ToResource(), + "databricks_cluster_policy": policies.ResourceClusterPolicy().ToResource(), + "databricks_dashboard": dashboards.ResourceDashboard().ToResource(), + "databricks_dbfs_file": storage.ResourceDbfsFile().ToResource(), + "databricks_directory": workspace.ResourceDirectory().ToResource(), + "databricks_entitlements": scim.ResourceEntitlements().ToResource(), + "databricks_external_location": catalog.ResourceExternalLocation().ToResource(), + "databricks_file": storage.ResourceFile().ToResource(), + "databricks_git_credential": repos.ResourceGitCredential().ToResource(), + "databricks_global_init_script": workspace.ResourceGlobalInitScript().ToResource(), + "databricks_grant": catalog.ResourceGrant().ToResource(), + "databricks_grants": catalog.ResourceGrants().ToResource(), + "databricks_group": scim.ResourceGroup().ToResource(), + "databricks_group_instance_profile": aws.ResourceGroupInstanceProfile().ToResource(), + "databricks_group_member": scim.ResourceGroupMember().ToResource(), + "databricks_group_role": scim.ResourceGroupRole().ToResource(), + "databricks_instance_pool": pools.ResourceInstancePool().ToResource(), + "databricks_instance_profile": aws.ResourceInstanceProfile().ToResource(), + "databricks_ip_access_list": access.ResourceIPAccessList().ToResource(), + "databricks_job": jobs.ResourceJob().ToResource(), + "databricks_lakehouse_monitor": catalog.ResourceLakehouseMonitor().ToResource(), + "databricks_library": clusters.ResourceLibrary().ToResource(), + "databricks_metastore": catalog.ResourceMetastore().ToResource(), + "databricks_metastore_assignment": catalog.ResourceMetastoreAssignment().ToResource(), + "databricks_metastore_data_access": catalog.ResourceMetastoreDataAccess().ToResource(), + "databricks_mlflow_experiment": mlflow.ResourceMlflowExperiment().ToResource(), + "databricks_mlflow_model": mlflow.ResourceMlflowModel().ToResource(), + "databricks_mlflow_webhook": mlflow.ResourceMlflowWebhook().ToResource(), + "databricks_model_serving": serving.ResourceModelServing().ToResource(), + "databricks_mount": storage.ResourceMount().ToResource(), + "databricks_mws_customer_managed_keys": mws.ResourceMwsCustomerManagedKeys().ToResource(), + "databricks_mws_credentials": mws.ResourceMwsCredentials().ToResource(), + "databricks_mws_log_delivery": mws.ResourceMwsLogDelivery().ToResource(), + "databricks_mws_ncc_binding": mws.ResourceMwsNccBinding().ToResource(), + "databricks_mws_ncc_private_endpoint_rule": mws.ResourceMwsNccPrivateEndpointRule().ToResource(), + 
"databricks_mws_networks": mws.ResourceMwsNetworks().ToResource(), + "databricks_mws_network_connectivity_config": mws.ResourceMwsNetworkConnectivityConfig().ToResource(), + "databricks_mws_permission_assignment": mws.ResourceMwsPermissionAssignment().ToResource(), + "databricks_mws_private_access_settings": mws.ResourceMwsPrivateAccessSettings().ToResource(), + "databricks_mws_storage_configurations": mws.ResourceMwsStorageConfigurations().ToResource(), + "databricks_mws_vpc_endpoint": mws.ResourceMwsVpcEndpoint().ToResource(), + "databricks_mws_workspaces": mws.ResourceMwsWorkspaces().ToResource(), + "databricks_notebook": workspace.ResourceNotebook().ToResource(), + "databricks_notification_destination": settings.ResourceNotificationDestination().ToResource(), + "databricks_obo_token": tokens.ResourceOboToken().ToResource(), + "databricks_online_table": catalog.ResourceOnlineTable().ToResource(), + "databricks_permission_assignment": access.ResourcePermissionAssignment().ToResource(), + "databricks_permissions": permissions.ResourcePermissions().ToResource(), + "databricks_pipeline": pipelines.ResourcePipeline().ToResource(), + "databricks_provider": sharing.ResourceProvider().ToResource(), + "databricks_quality_monitor": catalog.ResourceQualityMonitor().ToResource(), + "databricks_recipient": sharing.ResourceRecipient().ToResource(), + "databricks_registered_model": catalog.ResourceRegisteredModel().ToResource(), + "databricks_repo": repos.ResourceRepo().ToResource(), + "databricks_schema": catalog.ResourceSchema().ToResource(), + "databricks_secret": secrets.ResourceSecret().ToResource(), + "databricks_secret_scope": secrets.ResourceSecretScope().ToResource(), + "databricks_secret_acl": secrets.ResourceSecretACL().ToResource(), + "databricks_service_principal": scim.ResourceServicePrincipal().ToResource(), + "databricks_service_principal_role": aws.ResourceServicePrincipalRole().ToResource(), + "databricks_service_principal_secret": tokens.ResourceServicePrincipalSecret().ToResource(), + "databricks_share": sharing.ResourceShare().ToResource(), + "databricks_sql_dashboard": sql.ResourceSqlDashboard().ToResource(), + "databricks_sql_endpoint": sql.ResourceSqlEndpoint().ToResource(), + "databricks_sql_global_config": sql.ResourceSqlGlobalConfig().ToResource(), + "databricks_sql_permissions": access.ResourceSqlPermissions().ToResource(), + "databricks_sql_query": sql.ResourceSqlQuery().ToResource(), + "databricks_sql_alert": sql.ResourceSqlAlert().ToResource(), + "databricks_sql_table": catalog.ResourceSqlTable().ToResource(), + "databricks_sql_visualization": sql.ResourceSqlVisualization().ToResource(), + "databricks_sql_widget": sql.ResourceSqlWidget().ToResource(), + "databricks_storage_credential": catalog.ResourceStorageCredential().ToResource(), + "databricks_system_schema": catalog.ResourceSystemSchema().ToResource(), + "databricks_table": catalog.ResourceTable().ToResource(), + "databricks_token": tokens.ResourceToken().ToResource(), + "databricks_user": scim.ResourceUser().ToResource(), + "databricks_user_instance_profile": aws.ResourceUserInstanceProfile().ToResource(), + "databricks_user_role": aws.ResourceUserRole().ToResource(), + "databricks_vector_search_endpoint": vectorsearch.ResourceVectorSearchEndpoint().ToResource(), + "databricks_vector_search_index": vectorsearch.ResourceVectorSearchIndex().ToResource(), + "databricks_volume": catalog.ResourceVolume().ToResource(), + "databricks_workspace_binding": catalog.ResourceWorkspaceBinding().ToResource(), + 
"databricks_workspace_conf": workspace.ResourceWorkspaceConf().ToResource(), + "databricks_workspace_file": workspace.ResourceWorkspaceFile().ToResource(), + } + + // Remove the resources and data sources that are being migrated to plugin framework + for _, resourceToRemove := range pluginfw.GetSdkV2ResourcesToRemove() { + delete(resourceMap, resourceToRemove) + } + + for _, dataSourceToRemove := range pluginfw.GetSdkV2DataSourcesToRemove() { + delete(resourceMap, dataSourceToRemove) + } + p := &schema.Provider{ +<<<<<<< Updated upstream DataSourcesMap: map[string]*schema.Resource{ // must be in alphabetical order "databricks_aws_crossaccount_policy": aws.DataAwsCrossaccountPolicy().ToResource(), "databricks_aws_assume_role_policy": aws.DataAwsAssumeRolePolicy().ToResource(), @@ -226,6 +389,11 @@ func DatabricksProvider() *schema.Provider { "databricks_workspace_file": workspace.ResourceWorkspaceFile().ToResource(), }, Schema: providerSchema(), +======= + DataSourcesMap: dataSourceMap, + ResourcesMap: resourceMap, + Schema: providerSchema(), +>>>>>>> Stashed changes } for name, resource := range settings.AllSettingsResources() { p.ResourcesMap[fmt.Sprintf("databricks_%s_setting", name)] = resource.ToResource() From 096e6450a632b8b3347c41c6234442ff8766a316 Mon Sep 17 00:00:00 2001 From: edwardfeng-db Date: Mon, 21 Oct 2024 21:00:56 -0700 Subject: [PATCH 2/4] update --- internal/providers/pluginfw/pluginfw.go | 18 -- .../pluginfw/pluginfw_rollout_utils.go | 2 + internal/providers/sdkv2/sdkv2.go | 158 +----------------- 3 files changed, 4 insertions(+), 174 deletions(-) diff --git a/internal/providers/pluginfw/pluginfw.go b/internal/providers/pluginfw/pluginfw.go index 38e26d7d70..dab31321b2 100644 --- a/internal/providers/pluginfw/pluginfw.go +++ b/internal/providers/pluginfw/pluginfw.go @@ -16,15 +16,6 @@ import ( "github.com/databricks/terraform-provider-databricks/commands" "github.com/databricks/terraform-provider-databricks/common" providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common" -<<<<<<< Updated upstream - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster" - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library" - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations" - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor" - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model" - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume" -======= ->>>>>>> Stashed changes "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/diag" @@ -51,16 +42,7 @@ func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []fun } func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []func() datasource.DataSource { -<<<<<<< Updated upstream - return []func() datasource.DataSource{ - cluster.DataSourceCluster, - volume.DataSourceVolumes, - registered_model.DataSourceRegisteredModel, - notificationdestinations.DataSourceNotificationDestinations, - } -======= return getPluginFrameworkDataSourcesToRegister() ->>>>>>> Stashed changes } func (p *DatabricksProviderPluginFramework) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) { 
diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go
index 535d5b565a..0c5535ad29 100644
--- a/internal/providers/pluginfw/pluginfw_rollout_utils.go
+++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -12,6 +12,7 @@ import (
 	"strings"
 
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
@@ -39,6 +40,7 @@ var onboardedResources = []func() resource.Resource{
 // List of data sources that have been onboarded to the plugin framework - not migrated from sdkv2.
 var onboardedDataSources = []func() datasource.DataSource{
 	registered_model.DataSourceRegisteredModel,
+	notificationdestinations.DataSourceNotificationDestinations,
 }
 
 // getUseSdkV2Resources is a helper function to get the names of resources that should use SDK V2 instead of the plugin framework
diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go
index 134c838deb..26c30c518b 100644
--- a/internal/providers/sdkv2/sdkv2.go
+++ b/internal/providers/sdkv2/sdkv2.go
@@ -130,6 +130,7 @@ func DatabricksProvider() *schema.Provider {
 	resourceMap := map[string]*schema.Resource{ // must be in alphabetical order
 		"databricks_access_control_rule_set":   permissions.ResourceAccessControlRuleSet().ToResource(),
+		"databricks_alert":                     sql.ResourceAlert().ToResource(),
 		"databricks_artifact_allowlist":        catalog.ResourceArtifactAllowlist().ToResource(),
 		"databricks_aws_s3_mount":              storage.ResourceAWSS3Mount().ToResource(),
 		"databricks_azure_adls_gen1_mount":     storage.ResourceAzureAdlsGen1Mount().ToResource(),
@@ -138,6 +139,7 @@ func DatabricksProvider() *schema.Provider {
 		"databricks_budget":                    finops.ResourceBudget().ToResource(),
 		"databricks_catalog":                   catalog.ResourceCatalog().ToResource(),
 		"databricks_catalog_workspace_binding": catalog.ResourceCatalogWorkspaceBinding().ToResource(),
+		"databricks_custom_app_integration":    apps.ResourceCustomAppIntegration().ToResource(),
 		"databricks_connection":                catalog.ResourceConnection().ToResource(),
 		"databricks_cluster":                   clusters.ResourceCluster().ToResource(),
 		"databricks_cluster_policy":            policies.ResourceClusterPolicy().ToResource(),
@@ -235,165 +237,9 @@ func DatabricksProvider() *schema.Provider {
 	}
 
 	p := &schema.Provider{
-<<<<<<< Updated upstream
-		DataSourcesMap: map[string]*schema.Resource{ // must be in alphabetical order
-			"databricks_aws_crossaccount_policy":              aws.DataAwsCrossaccountPolicy().ToResource(),
-			"databricks_aws_assume_role_policy":               aws.DataAwsAssumeRolePolicy().ToResource(),
-			"databricks_aws_bucket_policy":                    aws.DataAwsBucketPolicy().ToResource(),
-			"databricks_aws_unity_catalog_assume_role_policy": aws.DataAwsUnityCatalogAssumeRolePolicy().ToResource(),
-			"databricks_aws_unity_catalog_policy":             aws.DataAwsUnityCatalogPolicy().ToResource(),
-			"databricks_cluster":                              clusters.DataSourceCluster().ToResource(),
-			"databricks_clusters":                             clusters.DataSourceClusters().ToResource(),
-			"databricks_cluster_policy":                       policies.DataSourceClusterPolicy().ToResource(),
-			"databricks_catalog":                              catalog.DataSourceCatalog().ToResource(),
-
"databricks_catalogs": catalog.DataSourceCatalogs().ToResource(), - "databricks_current_config": mws.DataSourceCurrentConfiguration().ToResource(), - "databricks_current_metastore": catalog.DataSourceCurrentMetastore().ToResource(), - "databricks_current_user": scim.DataSourceCurrentUser().ToResource(), - "databricks_dbfs_file": storage.DataSourceDbfsFile().ToResource(), - "databricks_dbfs_file_paths": storage.DataSourceDbfsFilePaths().ToResource(), - "databricks_directory": workspace.DataSourceDirectory().ToResource(), - "databricks_external_location": catalog.DataSourceExternalLocation().ToResource(), - "databricks_external_locations": catalog.DataSourceExternalLocations().ToResource(), - "databricks_group": scim.DataSourceGroup().ToResource(), - "databricks_instance_pool": pools.DataSourceInstancePool().ToResource(), - "databricks_instance_profiles": aws.DataSourceInstanceProfiles().ToResource(), - "databricks_jobs": jobs.DataSourceJobs().ToResource(), - "databricks_job": jobs.DataSourceJob().ToResource(), - "databricks_metastore": catalog.DataSourceMetastore().ToResource(), - "databricks_metastores": catalog.DataSourceMetastores().ToResource(), - "databricks_mlflow_experiment": mlflow.DataSourceExperiment().ToResource(), - "databricks_mlflow_model": mlflow.DataSourceModel().ToResource(), - "databricks_mlflow_models": mlflow.DataSourceModels().ToResource(), - "databricks_mws_credentials": mws.DataSourceMwsCredentials().ToResource(), - "databricks_mws_workspaces": mws.DataSourceMwsWorkspaces().ToResource(), - "databricks_node_type": clusters.DataSourceNodeType().ToResource(), - "databricks_notebook": workspace.DataSourceNotebook().ToResource(), - "databricks_notebook_paths": workspace.DataSourceNotebookPaths().ToResource(), - "databricks_pipelines": pipelines.DataSourcePipelines().ToResource(), - "databricks_schema": catalog.DataSourceSchema().ToResource(), - "databricks_schemas": catalog.DataSourceSchemas().ToResource(), - "databricks_service_principal": scim.DataSourceServicePrincipal().ToResource(), - "databricks_service_principals": scim.DataSourceServicePrincipals().ToResource(), - "databricks_share": sharing.DataSourceShare().ToResource(), - "databricks_shares": sharing.DataSourceShares().ToResource(), - "databricks_spark_version": clusters.DataSourceSparkVersion().ToResource(), - "databricks_sql_warehouse": sql.DataSourceWarehouse().ToResource(), - "databricks_sql_warehouses": sql.DataSourceWarehouses().ToResource(), - "databricks_storage_credential": catalog.DataSourceStorageCredential().ToResource(), - "databricks_storage_credentials": catalog.DataSourceStorageCredentials().ToResource(), - "databricks_table": catalog.DataSourceTable().ToResource(), - "databricks_tables": catalog.DataSourceTables().ToResource(), - "databricks_views": catalog.DataSourceViews().ToResource(), - "databricks_volume": catalog.DataSourceVolume().ToResource(), - "databricks_volumes": catalog.DataSourceVolumes().ToResource(), - "databricks_user": scim.DataSourceUser().ToResource(), - "databricks_zones": clusters.DataSourceClusterZones().ToResource(), - }, - ResourcesMap: map[string]*schema.Resource{ // must be in alphabetical order - "databricks_access_control_rule_set": permissions.ResourceAccessControlRuleSet().ToResource(), - "databricks_alert": sql.ResourceAlert().ToResource(), - "databricks_artifact_allowlist": catalog.ResourceArtifactAllowlist().ToResource(), - "databricks_aws_s3_mount": storage.ResourceAWSS3Mount().ToResource(), - "databricks_azure_adls_gen1_mount": 
storage.ResourceAzureAdlsGen1Mount().ToResource(), - "databricks_azure_adls_gen2_mount": storage.ResourceAzureAdlsGen2Mount().ToResource(), - "databricks_azure_blob_mount": storage.ResourceAzureBlobMount().ToResource(), - "databricks_budget": finops.ResourceBudget().ToResource(), - "databricks_catalog": catalog.ResourceCatalog().ToResource(), - "databricks_catalog_workspace_binding": catalog.ResourceCatalogWorkspaceBinding().ToResource(), - "databricks_custom_app_integration": apps.ResourceCustomAppIntegration().ToResource(), - "databricks_connection": catalog.ResourceConnection().ToResource(), - "databricks_cluster": clusters.ResourceCluster().ToResource(), - "databricks_cluster_policy": policies.ResourceClusterPolicy().ToResource(), - "databricks_dashboard": dashboards.ResourceDashboard().ToResource(), - "databricks_dbfs_file": storage.ResourceDbfsFile().ToResource(), - "databricks_directory": workspace.ResourceDirectory().ToResource(), - "databricks_entitlements": scim.ResourceEntitlements().ToResource(), - "databricks_external_location": catalog.ResourceExternalLocation().ToResource(), - "databricks_file": storage.ResourceFile().ToResource(), - "databricks_git_credential": repos.ResourceGitCredential().ToResource(), - "databricks_global_init_script": workspace.ResourceGlobalInitScript().ToResource(), - "databricks_grant": catalog.ResourceGrant().ToResource(), - "databricks_grants": catalog.ResourceGrants().ToResource(), - "databricks_group": scim.ResourceGroup().ToResource(), - "databricks_group_instance_profile": aws.ResourceGroupInstanceProfile().ToResource(), - "databricks_group_member": scim.ResourceGroupMember().ToResource(), - "databricks_group_role": scim.ResourceGroupRole().ToResource(), - "databricks_instance_pool": pools.ResourceInstancePool().ToResource(), - "databricks_instance_profile": aws.ResourceInstanceProfile().ToResource(), - "databricks_ip_access_list": access.ResourceIPAccessList().ToResource(), - "databricks_job": jobs.ResourceJob().ToResource(), - "databricks_lakehouse_monitor": catalog.ResourceLakehouseMonitor().ToResource(), - "databricks_library": clusters.ResourceLibrary().ToResource(), - "databricks_metastore": catalog.ResourceMetastore().ToResource(), - "databricks_metastore_assignment": catalog.ResourceMetastoreAssignment().ToResource(), - "databricks_metastore_data_access": catalog.ResourceMetastoreDataAccess().ToResource(), - "databricks_mlflow_experiment": mlflow.ResourceMlflowExperiment().ToResource(), - "databricks_mlflow_model": mlflow.ResourceMlflowModel().ToResource(), - "databricks_mlflow_webhook": mlflow.ResourceMlflowWebhook().ToResource(), - "databricks_model_serving": serving.ResourceModelServing().ToResource(), - "databricks_mount": storage.ResourceMount().ToResource(), - "databricks_mws_customer_managed_keys": mws.ResourceMwsCustomerManagedKeys().ToResource(), - "databricks_mws_credentials": mws.ResourceMwsCredentials().ToResource(), - "databricks_mws_log_delivery": mws.ResourceMwsLogDelivery().ToResource(), - "databricks_mws_ncc_binding": mws.ResourceMwsNccBinding().ToResource(), - "databricks_mws_ncc_private_endpoint_rule": mws.ResourceMwsNccPrivateEndpointRule().ToResource(), - "databricks_mws_networks": mws.ResourceMwsNetworks().ToResource(), - "databricks_mws_network_connectivity_config": mws.ResourceMwsNetworkConnectivityConfig().ToResource(), - "databricks_mws_permission_assignment": mws.ResourceMwsPermissionAssignment().ToResource(), - "databricks_mws_private_access_settings": mws.ResourceMwsPrivateAccessSettings().ToResource(), - 
"databricks_mws_storage_configurations": mws.ResourceMwsStorageConfigurations().ToResource(), - "databricks_mws_vpc_endpoint": mws.ResourceMwsVpcEndpoint().ToResource(), - "databricks_mws_workspaces": mws.ResourceMwsWorkspaces().ToResource(), - "databricks_notebook": workspace.ResourceNotebook().ToResource(), - "databricks_notification_destination": settings.ResourceNotificationDestination().ToResource(), - "databricks_obo_token": tokens.ResourceOboToken().ToResource(), - "databricks_online_table": catalog.ResourceOnlineTable().ToResource(), - "databricks_permission_assignment": access.ResourcePermissionAssignment().ToResource(), - "databricks_permissions": permissions.ResourcePermissions().ToResource(), - "databricks_pipeline": pipelines.ResourcePipeline().ToResource(), - "databricks_provider": sharing.ResourceProvider().ToResource(), - "databricks_quality_monitor": catalog.ResourceQualityMonitor().ToResource(), - "databricks_recipient": sharing.ResourceRecipient().ToResource(), - "databricks_registered_model": catalog.ResourceRegisteredModel().ToResource(), - "databricks_repo": repos.ResourceRepo().ToResource(), - "databricks_schema": catalog.ResourceSchema().ToResource(), - "databricks_secret": secrets.ResourceSecret().ToResource(), - "databricks_secret_scope": secrets.ResourceSecretScope().ToResource(), - "databricks_secret_acl": secrets.ResourceSecretACL().ToResource(), - "databricks_service_principal": scim.ResourceServicePrincipal().ToResource(), - "databricks_service_principal_role": aws.ResourceServicePrincipalRole().ToResource(), - "databricks_service_principal_secret": tokens.ResourceServicePrincipalSecret().ToResource(), - "databricks_share": sharing.ResourceShare().ToResource(), - "databricks_sql_dashboard": sql.ResourceSqlDashboard().ToResource(), - "databricks_sql_endpoint": sql.ResourceSqlEndpoint().ToResource(), - "databricks_sql_global_config": sql.ResourceSqlGlobalConfig().ToResource(), - "databricks_sql_permissions": access.ResourceSqlPermissions().ToResource(), - "databricks_sql_query": sql.ResourceSqlQuery().ToResource(), - "databricks_sql_alert": sql.ResourceSqlAlert().ToResource(), - "databricks_sql_table": catalog.ResourceSqlTable().ToResource(), - "databricks_sql_visualization": sql.ResourceSqlVisualization().ToResource(), - "databricks_sql_widget": sql.ResourceSqlWidget().ToResource(), - "databricks_storage_credential": catalog.ResourceStorageCredential().ToResource(), - "databricks_system_schema": catalog.ResourceSystemSchema().ToResource(), - "databricks_table": catalog.ResourceTable().ToResource(), - "databricks_token": tokens.ResourceToken().ToResource(), - "databricks_user": scim.ResourceUser().ToResource(), - "databricks_user_instance_profile": aws.ResourceUserInstanceProfile().ToResource(), - "databricks_user_role": aws.ResourceUserRole().ToResource(), - "databricks_vector_search_endpoint": vectorsearch.ResourceVectorSearchEndpoint().ToResource(), - "databricks_vector_search_index": vectorsearch.ResourceVectorSearchIndex().ToResource(), - "databricks_volume": catalog.ResourceVolume().ToResource(), - "databricks_workspace_binding": catalog.ResourceWorkspaceBinding().ToResource(), - "databricks_workspace_conf": workspace.ResourceWorkspaceConf().ToResource(), - "databricks_workspace_file": workspace.ResourceWorkspaceFile().ToResource(), - }, - Schema: providerSchema(), -======= DataSourcesMap: dataSourceMap, ResourcesMap: resourceMap, Schema: providerSchema(), ->>>>>>> Stashed changes } for name, resource := range settings.AllSettingsResources() { 
p.ResourcesMap[fmt.Sprintf("databricks_%s_setting", name)] = resource.ToResource() From d9d7384a5db98121a8d4f94ea03437d576d2a9f6 Mon Sep 17 00:00:00 2001 From: edwardfeng-db Date: Mon, 21 Oct 2024 21:36:17 -0700 Subject: [PATCH 3/4] update --- internal/providers/pluginfw/pluginfw_rollout_utils.go | 1 + internal/providers/sdkv2/sdkv2.go | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go index 0c5535ad29..957ebb21b1 100644 --- a/internal/providers/pluginfw/pluginfw_rollout_utils.go +++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go @@ -28,6 +28,7 @@ var migratedResources = []func() resource.Resource{ // List of data sources that have been migrated from SDK V2 to plugin framework var migratedDataSources = []func() datasource.DataSource{ + // TODO: Add DataSourceCluster back in after fixing unit tests. // cluster.DataSourceCluster, volume.DataSourceVolumes, } diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go index 26c30c518b..cd94c48555 100644 --- a/internal/providers/sdkv2/sdkv2.go +++ b/internal/providers/sdkv2/sdkv2.go @@ -228,12 +228,12 @@ func DatabricksProvider() *schema.Provider { } // Remove the resources and data sources that are being migrated to plugin framework - for _, resourceToRemove := range pluginfw.GetSdkV2ResourcesToRemove() { - delete(resourceMap, resourceToRemove) + for _, dataSourceToRemove := range pluginfw.GetSdkV2DataSourcesToRemove() { + delete(dataSourceMap, dataSourceToRemove) } - for _, dataSourceToRemove := range pluginfw.GetSdkV2DataSourcesToRemove() { - delete(resourceMap, dataSourceToRemove) + for _, resourceToRemove := range pluginfw.GetSdkV2ResourcesToRemove() { + delete(resourceMap, resourceToRemove) } p := &schema.Provider{ From cabad22bf07b2dbf43ca532a84de11dc875c8b05 Mon Sep 17 00:00:00 2001 From: edwardfeng-db Date: Mon, 21 Oct 2024 22:45:27 -0700 Subject: [PATCH 4/4] update --- internal/providers/pluginfw/pluginfw.go | 4 +- .../pluginfw/pluginfw_rollout_utils.go | 48 +++++++++++-------- internal/providers/providers.go | 4 +- internal/providers/providers_test_utils.go | 2 +- internal/providers/sdkv2/sdkv2.go | 6 +-- .../providers/sdkv2/tests/coverage_test.go | 3 +- .../providers/sdkv2/tests/generate_test.go | 3 +- 7 files changed, 40 insertions(+), 30 deletions(-) diff --git a/internal/providers/pluginfw/pluginfw.go b/internal/providers/pluginfw/pluginfw.go index dab31321b2..b3c1493b5d 100644 --- a/internal/providers/pluginfw/pluginfw.go +++ b/internal/providers/pluginfw/pluginfw.go @@ -38,11 +38,11 @@ type DatabricksProviderPluginFramework struct { var _ provider.Provider = (*DatabricksProviderPluginFramework)(nil) func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []func() resource.Resource { - return getPluginFrameworkResourcesToRegister() + return getPluginFrameworkResourcesToRegister(ctx) } func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []func() datasource.DataSource { - return getPluginFrameworkDataSourcesToRegister() + return getPluginFrameworkDataSourcesToRegister(ctx) } func (p *DatabricksProviderPluginFramework) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) { diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go index 957ebb21b1..070651a3a1 100644 --- a/internal/providers/pluginfw/pluginfw_rollout_utils.go +++ 
b/internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -45,26 +45,34 @@ var onboardedDataSources = []func() datasource.DataSource{
 }
 
 // getUseSdkV2Resources is a helper function to get the names of resources that should use SDK V2 instead of the plugin framework
-func getUseSdkV2Resources() []string {
+func getUseSdkV2Resources(ctx context.Context) []string {
 	useSdkV2 := os.Getenv("USE_SDK_V2_RESOURCES")
-	if useSdkV2 == "" {
-		return []string{}
+	useSdkV2Ctx, _ := ctx.Value("USE_SDK_V2_RESOURCES").(string)
+	combinedNames := ""
+	if useSdkV2 != "" && useSdkV2Ctx != "" {
+		combinedNames = useSdkV2 + "," + useSdkV2Ctx
+	} else {
+		combinedNames = useSdkV2 + useSdkV2Ctx
 	}
-	return strings.Split(useSdkV2, ",")
+	return strings.Split(combinedNames, ",")
 }
 
 // getUseSdkV2DataSources is a helper function to get the names of data sources that should use SDK V2 instead of the plugin framework
-func getUseSdkV2DataSources() []string {
+func getUseSdkV2DataSources(ctx context.Context) []string {
 	useSdkV2 := os.Getenv("USE_SDK_V2_DATA_SOURCES")
-	if useSdkV2 == "" {
-		return []string{}
+	useSdkV2Ctx, _ := ctx.Value("USE_SDK_V2_DATA_SOURCES").(string)
+	combinedNames := ""
+	if useSdkV2 != "" && useSdkV2Ctx != "" {
+		combinedNames = useSdkV2 + "," + useSdkV2Ctx
+	} else {
+		combinedNames = useSdkV2 + useSdkV2Ctx
 	}
-	return strings.Split(useSdkV2, ",")
+	return strings.Split(combinedNames, ",")
 }
 
 // Helper function to check if a resource should be handled by SDK V2 instead of the plugin framework
-func shouldUseSdkV2Resource(resourceName string) bool {
-	useSdkV2Resources := getUseSdkV2Resources()
+func shouldUseSdkV2Resource(ctx context.Context, resourceName string) bool {
+	useSdkV2Resources := getUseSdkV2Resources(ctx)
 	for _, sdkV2Resource := range useSdkV2Resources {
 		if resourceName == sdkV2Resource {
 			return true
@@ -74,8 +82,8 @@ func shouldUseSdkV2Resource(resourceName string) bool {
 }
 
 // Helper function to check if a data source should be handled by SDK V2 instead of the plugin framework
-func shouldUseSdkV2DataSource(dataSourceName string) bool {
-	sdkV2DataSources := getUseSdkV2DataSources()
+func shouldUseSdkV2DataSource(ctx context.Context, dataSourceName string) bool {
+	sdkV2DataSources := getUseSdkV2DataSources(ctx)
 	for _, sdkV2DataSource := range sdkV2DataSources {
 		if dataSourceName == sdkV2DataSource {
 			return true
@@ -85,13 +93,13 @@ func shouldUseSdkV2DataSource(dataSourceName string) bool {
 }
 
 // getPluginFrameworkResourcesToRegister is a helper function to get the list of resources that are migrated away from sdkv2 to plugin framework
-func getPluginFrameworkResourcesToRegister() []func() resource.Resource {
+func getPluginFrameworkResourcesToRegister(ctx context.Context) []func() resource.Resource {
 	var resources []func() resource.Resource
 
 	// Loop through the list and add resources if they're not specifically marked to use the SDK V2
 	for _, resourceFunc := range migratedResources {
 		name := getResourceName(resourceFunc)
-		if !shouldUseSdkV2Resource(name) {
+		if !shouldUseSdkV2Resource(ctx, name) {
 			resources = append(resources, resourceFunc)
 		}
 	}
@@ -100,13 +108,13 @@ func getPluginFrameworkResourcesToRegister() []func() resource.Resource {
 }
 
 // getPluginFrameworkDataSourcesToRegister is a helper function to get the list of data sources that are migrated away from sdkv2 to plugin framework
-func getPluginFrameworkDataSourcesToRegister() []func() datasource.DataSource {
+func getPluginFrameworkDataSourcesToRegister(ctx context.Context) []func() datasource.DataSource {
 	var dataSources []func() datasource.DataSource
 
 	// Loop through the list and add data sources if they're not specifically marked to use the SDK V2
 	for _, dataSourceFunc := range migratedDataSources {
 		name := getDataSourceName(dataSourceFunc)
-		if !shouldUseSdkV2DataSource(name) {
+		if !shouldUseSdkV2DataSource(ctx, name) {
 			dataSources = append(dataSources, dataSourceFunc)
 		}
 	}
@@ -127,11 +135,11 @@ func getDataSourceName(dataSourceFunc func() datasource.DataSource) string {
 }
 
 // GetSdkV2ResourcesToRemove is a helper function to get the list of resources that are migrated away from sdkv2 to plugin framework
-func GetSdkV2ResourcesToRemove() []string {
+func GetSdkV2ResourcesToRemove(ctx context.Context) []string {
 	resourcesToRemove := []string{}
 	for _, resourceFunc := range migratedResources {
 		name := getResourceName(resourceFunc)
-		if !shouldUseSdkV2Resource(name) {
+		if !shouldUseSdkV2Resource(ctx, name) {
 			resourcesToRemove = append(resourcesToRemove, name)
 		}
 	}
@@ -139,11 +147,11 @@ func GetSdkV2ResourcesToRemove() []string {
 }
 
 // GetSdkV2DataSourcesToRemove is a helper function to get the list of data sources that are migrated away from sdkv2 to plugin framework
-func GetSdkV2DataSourcesToRemove() []string {
+func GetSdkV2DataSourcesToRemove(ctx context.Context) []string {
 	dataSourcesToRemove := []string{}
 	for _, dataSourceFunc := range migratedDataSources {
 		name := getDataSourceName(dataSourceFunc)
-		if !shouldUseSdkV2DataSource(name) {
+		if !shouldUseSdkV2DataSource(ctx, name) {
 			dataSourcesToRemove = append(dataSourcesToRemove, name)
 		}
 	}
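The context lookups above use the comma-ok assertion (useSdkV2Ctx, _ := ctx.Value(...).(string)) because ctx.Value returns nil for an absent key, and a bare .(string) on nil panics. A standalone sketch of the pattern — the typed key is a common Go convention shown for illustration; the patch itself keys on the raw string:

```go
package main

import (
	"context"
	"fmt"
)

type ctxKey string // typed keys avoid collisions between packages

const useSdkV2Key ctxKey = "USE_SDK_V2_RESOURCES"

// fromContext returns the override list, or "" when the value is absent or
// not a string; the comma-ok form never panics, unlike a bare .(string).
func fromContext(ctx context.Context) string {
	s, _ := ctx.Value(useSdkV2Key).(string)
	return s
}

func main() {
	fmt.Printf("%q\n", fromContext(context.Background())) // ""
	ctx := context.WithValue(context.Background(), useSdkV2Key, "databricks_library")
	fmt.Printf("%q\n", fromContext(ctx)) // "databricks_library"
}
```

One subtlety worth noting: when both sources are empty, strings.Split("", ",") yields a single empty string, so the opt-out list contains one "" entry — harmless, since no type is named by the empty string.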
diff --git a/internal/providers/providers.go b/internal/providers/providers.go
index 681df6f74d..c9ca759bc1 100644
--- a/internal/providers/providers.go
+++ b/internal/providers/providers.go
@@ -60,7 +60,7 @@ func GetProviderServer(ctx context.Context, options ...ServerOption) (tfprotov6.
 	}
 	sdkPluginProvider := serverOptions.sdkV2Provider
 	if sdkPluginProvider == nil {
-		sdkPluginProvider = sdkv2.DatabricksProvider()
+		sdkPluginProvider = sdkv2.DatabricksProvider(ctx)
 	}
 	pluginFrameworkProvider := serverOptions.pluginFrameworkProvider
 	if pluginFrameworkProvider == nil {
} upgradedSdkPluginProvider, err := tf5to6server.UpgradeServer( - context.Background(), + ctx, sdkPluginProvider.GRPCProvider, ) if err != nil { diff --git a/internal/providers/providers_test_utils.go b/internal/providers/providers_test_utils.go index a3e9cfdd51..c9309563bd 100644 --- a/internal/providers/providers_test_utils.go +++ b/internal/providers/providers_test_utils.go @@ -141,7 +141,7 @@ func (pf providerFixture) configureProviderAndReturnClient_SDKv2(t *testing.T) ( for k, v := range pf.env { t.Setenv(k, v) } - p := sdkv2.DatabricksProvider() + p := sdkv2.DatabricksProvider(context.Background()) ctx := context.Background() diags := p.Configure(ctx, terraform.NewResourceConfigRaw(pf.rawConfigSDKv2())) if len(diags) > 0 { diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go index cd94c48555..e37593246c 100644 --- a/internal/providers/sdkv2/sdkv2.go +++ b/internal/providers/sdkv2/sdkv2.go @@ -72,7 +72,7 @@ func init() { } // DatabricksProvider returns the entire terraform provider object -func DatabricksProvider() *schema.Provider { +func DatabricksProvider(ctx context.Context) *schema.Provider { dataSourceMap := map[string]*schema.Resource{ // must be in alphabetical order "databricks_aws_crossaccount_policy": aws.DataAwsCrossaccountPolicy().ToResource(), "databricks_aws_assume_role_policy": aws.DataAwsAssumeRolePolicy().ToResource(), @@ -228,11 +228,11 @@ func DatabricksProvider() *schema.Provider { } // Remove the resources and data sources that are being migrated to plugin framework - for _, dataSourceToRemove := range pluginfw.GetSdkV2DataSourcesToRemove() { + for _, dataSourceToRemove := range pluginfw.GetSdkV2DataSourcesToRemove(ctx) { delete(dataSourceMap, dataSourceToRemove) } - for _, resourceToRemove := range pluginfw.GetSdkV2ResourcesToRemove() { + for _, resourceToRemove := range pluginfw.GetSdkV2ResourcesToRemove(ctx) { delete(resourceMap, resourceToRemove) } diff --git a/internal/providers/sdkv2/tests/coverage_test.go b/internal/providers/sdkv2/tests/coverage_test.go index a9fff7ff25..9308188bc7 100644 --- a/internal/providers/sdkv2/tests/coverage_test.go +++ b/internal/providers/sdkv2/tests/coverage_test.go @@ -1,6 +1,7 @@ package tests import ( + "context" "fmt" "io" "io/fs" @@ -144,7 +145,7 @@ func TestCoverageReport(t *testing.T) { files, err := recursiveChildren("..") assert.NoError(t, err) - p := sdkv2.DatabricksProvider() + p := sdkv2.DatabricksProvider(context.Background()) var cr CoverageReport var longestResourceName, longestFieldName int diff --git a/internal/providers/sdkv2/tests/generate_test.go b/internal/providers/sdkv2/tests/generate_test.go index 4b12513670..ce6d420a50 100644 --- a/internal/providers/sdkv2/tests/generate_test.go +++ b/internal/providers/sdkv2/tests/generate_test.go @@ -1,6 +1,7 @@ package tests import ( + "context" "fmt" "go/ast" "go/parser" @@ -234,7 +235,7 @@ func TestGenerateTestCodeStubs(t *testing.T) { t.Logf("Got %d unit tests in total. %v", len(funcs), resourceTestStub{}) t.Skip() - p := sdkv2.DatabricksProvider() + p := sdkv2.DatabricksProvider(context.Background()) for name, resource := range p.ResourcesMap { if name != "databricks_group_instance_profile" { continue
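Taken together, the override plumbing in this last patch merges names from the environment and from the context, joining with a comma only when both are non-empty, and threads ctx from GetProviderServer down into DatabricksProvider. A self-contained sketch of the merge behavior (combine is illustrative, not the provider's API):

```go
package main

import (
	"fmt"
	"strings"
)

// combine mirrors the merging above: join env-var and context-provided name
// lists with a comma only when both are non-empty.
func combine(fromEnv, fromCtx string) []string {
	combined := ""
	if fromEnv != "" && fromCtx != "" {
		combined = fromEnv + "," + fromCtx
	} else {
		combined = fromEnv + fromCtx // at most one side is non-empty
	}
	if combined == "" {
		return nil
	}
	return strings.Split(combined, ",")
}

func main() {
	fmt.Println(combine("databricks_library", ""))                   // [databricks_library]
	fmt.Println(combine("databricks_library", "databricks_volumes")) // [databricks_library databricks_volumes]
	fmt.Println(combine("", ""))                                     // []
}
```

This is what lets a test opt individual types back to SDK V2 per invocation, for example by passing a context carrying a "USE_SDK_V2_RESOURCES" value into DatabricksProvider, without touching the process environment.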