diff --git a/internal/providers/pluginfw/pluginfw.go b/internal/providers/pluginfw/pluginfw.go
index db811d5ae2..b3c1493b5d 100644
--- a/internal/providers/pluginfw/pluginfw.go
+++ b/internal/providers/pluginfw/pluginfw.go
@@ -16,12 +16,6 @@ import (
 	"github.com/databricks/terraform-provider-databricks/commands"
 	"github.com/databricks/terraform-provider-databricks/common"
 	providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
 	"github.com/hashicorp/terraform-plugin-framework/datasource"
 	"github.com/hashicorp/terraform-plugin-framework/diag"
@@ -44,19 +38,11 @@ type DatabricksProviderPluginFramework struct {
 var _ provider.Provider = (*DatabricksProviderPluginFramework)(nil)
 
 func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []func() resource.Resource {
-	return []func() resource.Resource{
-		qualitymonitor.ResourceQualityMonitor,
-		library.ResourceLibrary,
-	}
+	return getPluginFrameworkResourcesToRegister(ctx)
 }
 
 func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []func() datasource.DataSource {
-	return []func() datasource.DataSource{
-		cluster.DataSourceCluster,
-		volume.DataSourceVolumes,
-		registered_model.DataSourceRegisteredModel,
-		notificationdestinations.DataSourceNotificationDestinations,
-	}
+	return getPluginFrameworkDataSourcesToRegister(ctx)
 }
 
 func (p *DatabricksProviderPluginFramework) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) {
diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go
new file mode 100644
index 0000000000..070651a3a1
--- /dev/null
+++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -0,0 +1,159 @@
+package pluginfw
+
+// This file contains the utilities for controlling the plugin framework rollout.
+// Migrated resources and data sources can be added to the two migrated lists below to have them registered with the plugin framework.
+// Users can manually specify resources and data sources to use SDK V2 instead of the plugin framework by setting the USE_SDK_V2_RESOURCES and USE_SDK_V2_DATA_SOURCES environment variables.
+
+// Example: USE_SDK_V2_RESOURCES="databricks_library" would force the library resource to use SDK V2 instead of the plugin framework.
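+//
+// The same override can also be supplied programmatically through the provider's
+// context, since the helpers below consult ctx.Value with the same string keys as
+// the environment variables. For example (an illustrative snippet, not part of
+// this change):
+//
+//	ctx := context.WithValue(context.Background(), "USE_SDK_V2_RESOURCES", "databricks_library")
+//	server, err := providers.GetProviderServer(ctx) // hypothetical call site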
+
+import (
+	"context"
+	"os"
+	"strings"
+
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
+// List of resources that have been migrated from SDK V2 to the plugin framework
+var migratedResources = []func() resource.Resource{
+	qualitymonitor.ResourceQualityMonitor,
+	library.ResourceLibrary,
+}
+
+// List of data sources that have been migrated from SDK V2 to the plugin framework
+var migratedDataSources = []func() datasource.DataSource{
+	// TODO: Add DataSourceCluster back in after fixing unit tests.
+	// cluster.DataSourceCluster,
+	volume.DataSourceVolumes,
+}
+
+// List of resources that have been onboarded to the plugin framework - not migrated from sdkv2.
+var onboardedResources = []func() resource.Resource{
+	// TODO: Add resources here
+}
+
+// List of data sources that have been onboarded to the plugin framework - not migrated from sdkv2.
+var onboardedDataSources = []func() datasource.DataSource{
+	registered_model.DataSourceRegisteredModel,
+	notificationdestinations.DataSourceNotificationDestinations,
+}
+
+// getUseSdkV2Resources is a helper function to get the names of resources that should use SDK V2 instead of the plugin framework
+func getUseSdkV2Resources(ctx context.Context) []string {
+	useSdkV2 := os.Getenv("USE_SDK_V2_RESOURCES")
+	useSdkV2Ctx, _ := ctx.Value("USE_SDK_V2_RESOURCES").(string) // comma-ok assertion avoids a panic when the context value is unset
+	combinedNames := ""
+	if useSdkV2 != "" && useSdkV2Ctx != "" {
+		combinedNames = useSdkV2 + "," + useSdkV2Ctx
+	} else {
+		combinedNames = useSdkV2 + useSdkV2Ctx
+	}
+	return strings.Split(combinedNames, ",")
+}
+
+// getUseSdkV2DataSources is a helper function to get the names of data sources that should use SDK V2 instead of the plugin framework
+func getUseSdkV2DataSources(ctx context.Context) []string {
+	useSdkV2 := os.Getenv("USE_SDK_V2_DATA_SOURCES")
+	useSdkV2Ctx, _ := ctx.Value("USE_SDK_V2_DATA_SOURCES").(string) // comma-ok assertion avoids a panic when the context value is unset
+	combinedNames := ""
+	if useSdkV2 != "" && useSdkV2Ctx != "" {
+		combinedNames = useSdkV2 + "," + useSdkV2Ctx
+	} else {
+		combinedNames = useSdkV2 + useSdkV2Ctx
+	}
+	return strings.Split(combinedNames, ",")
+}
+
+// Helper function to check if a resource should use SDK V2 instead of the plugin framework
+func shouldUseSdkV2Resource(ctx context.Context, resourceName string) bool {
+	useSdkV2Resources := getUseSdkV2Resources(ctx)
+	for _, sdkV2Resource := range useSdkV2Resources {
+		if resourceName == sdkV2Resource {
+			return true
+		}
+	}
+	return false
+}
+
+// Helper function to check if a data source should use SDK V2 instead of the plugin framework
+func shouldUseSdkV2DataSource(ctx context.Context, dataSourceName string) bool {
+	sdkV2DataSources := getUseSdkV2DataSources(ctx)
+	for _, sdkV2DataSource := range sdkV2DataSources {
+		if dataSourceName == sdkV2DataSource {
+			return true
+		}
+	}
+	return false
+}
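+
+// For illustration (hypothetical values): with USE_SDK_V2_RESOURCES="databricks_library"
+// set in the environment and "databricks_quality_monitor" carried in the context, the
+// helpers above resolve as follows:
+//
+//	getUseSdkV2Resources(ctx)                                 // []string{"databricks_library", "databricks_quality_monitor"}
+//	shouldUseSdkV2Resource(ctx, "databricks_library")         // true
+//	shouldUseSdkV2Resource(ctx, "databricks_quality_monitor") // true
+//	shouldUseSdkV2Resource(ctx, "databricks_volume")          // false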
+
+// getPluginFrameworkResourcesToRegister is a helper function to get the list of resources that are migrated away from sdkv2 to the plugin framework
+func getPluginFrameworkResourcesToRegister(ctx context.Context) []func() resource.Resource {
+	var resources []func() resource.Resource
+
+	// Loop through the list and add resources if they're not specifically marked to use SDK V2
+	for _, resourceFunc := range migratedResources {
+		name := getResourceName(resourceFunc)
+		if !shouldUseSdkV2Resource(ctx, name) {
+			resources = append(resources, resourceFunc)
+		}
+	}
+
+	return append(resources, onboardedResources...)
+}
+
+// getPluginFrameworkDataSourcesToRegister is a helper function to get the list of data sources that are migrated away from sdkv2 to the plugin framework
+func getPluginFrameworkDataSourcesToRegister(ctx context.Context) []func() datasource.DataSource {
+	var dataSources []func() datasource.DataSource
+
+	// Loop through the list and add data sources if they're not specifically marked to use SDK V2
+	for _, dataSourceFunc := range migratedDataSources {
+		name := getDataSourceName(dataSourceFunc)
+		if !shouldUseSdkV2DataSource(ctx, name) {
+			dataSources = append(dataSources, dataSourceFunc)
+		}
+	}
+
+	return append(dataSources, onboardedDataSources...)
+}
+
+func getResourceName(resourceFunc func() resource.Resource) string {
+	resp := resource.MetadataResponse{}
+	resourceFunc().Metadata(context.Background(), resource.MetadataRequest{ProviderTypeName: "databricks"}, &resp)
+	return resp.TypeName
+}
+
+func getDataSourceName(dataSourceFunc func() datasource.DataSource) string {
+	resp := datasource.MetadataResponse{}
+	dataSourceFunc().Metadata(context.Background(), datasource.MetadataRequest{ProviderTypeName: "databricks"}, &resp)
+	return resp.TypeName
+}
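+
+// Note: getResourceName and getDataSourceName derive a type name by instantiating
+// the resource or data source and invoking its Metadata method, so the names used
+// for the override checks always match what the provider actually registers. For
+// example (illustrative), getResourceName(library.ResourceLibrary) is expected to
+// return "databricks_library" now that the production name is used.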
+
+// GetSdkV2ResourcesToRemove is a helper function to get the list of resource names to remove from sdkv2 because they have migrated to the plugin framework
+func GetSdkV2ResourcesToRemove(ctx context.Context) []string {
+	resourcesToRemove := []string{}
+	for _, resourceFunc := range migratedResources {
+		name := getResourceName(resourceFunc)
+		if !shouldUseSdkV2Resource(ctx, name) {
+			resourcesToRemove = append(resourcesToRemove, name)
+		}
+	}
+	return resourcesToRemove
+}
+
+// GetSdkV2DataSourcesToRemove is a helper function to get the list of data source names to remove from sdkv2 because they have migrated to the plugin framework
+func GetSdkV2DataSourcesToRemove(ctx context.Context) []string {
+	dataSourcesToRemove := []string{}
+	for _, dataSourceFunc := range migratedDataSources {
+		name := getDataSourceName(dataSourceFunc)
+		if !shouldUseSdkV2DataSource(ctx, name) {
+			dataSourcesToRemove = append(dataSourcesToRemove, name)
+		}
+	}
+	return dataSourcesToRemove
+}
diff --git a/internal/providers/pluginfw/resources/cluster/data_cluster.go b/internal/providers/pluginfw/resources/cluster/data_cluster.go
index 8d0499ccb8..7238e9ec8a 100644
--- a/internal/providers/pluginfw/resources/cluster/data_cluster.go
+++ b/internal/providers/pluginfw/resources/cluster/data_cluster.go
@@ -38,7 +38,7 @@ type ClusterInfo struct {
 }
 
 func (d *ClusterDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceName)
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
 }
 
 func (d *ClusterDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
diff --git a/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go b/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go
index cbac44de04..4157cad809 100644
--- a/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go
+++ b/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go
@@ -7,7 +7,7 @@ import (
 )
 
 const dataClusterTemplateById = `
-	data "databricks_cluster_pluginframework" "by_id" {
+	data "databricks_cluster" "by_id" {
 		cluster_id = "{env.TEST_DEFAULT_CLUSTER_ID}"
 	}
 `
@@ -21,8 +21,8 @@ func TestAccDataSourceClusterByID(t *testing.T) {
 func TestAccDataSourceClusterByName(t *testing.T) {
 	acceptance.WorkspaceLevel(t, acceptance.Step{
 		Template: dataClusterTemplateById + `
-		data "databricks_cluster_pluginframework" "by_name" {
-			cluster_name = data.databricks_cluster_pluginframework.by_id.cluster_name
+		data "databricks_cluster" "by_name" {
+			cluster_name = data.databricks_cluster.by_id.cluster_name
 		}`,
 	})
 }
diff --git a/internal/providers/pluginfw/resources/library/resource_library.go b/internal/providers/pluginfw/resources/library/resource_library.go
index 1c999bd2ed..fffc0f95e3 100644
--- a/internal/providers/pluginfw/resources/library/resource_library.go
+++ b/internal/providers/pluginfw/resources/library/resource_library.go
@@ -69,7 +69,7 @@ type LibraryResource struct {
 }
 
 func (r *LibraryResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(resourceName)
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
 }
 
 func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
diff --git a/internal/providers/pluginfw/resources/library/resource_library_acc_test.go b/internal/providers/pluginfw/resources/library/resource_library_acc_test.go
index 153657ae41..11d2b42c84 100644
--- a/internal/providers/pluginfw/resources/library/resource_library_acc_test.go
+++ b/internal/providers/pluginfw/resources/library/resource_library_acc_test.go
@@ -24,7 +24,7 @@ func TestAccLibraryCreationPluginFramework(t *testing.T) {
 			"ResourceClass" = "SingleNode"
 		}
 	}
-	resource "databricks_library_pluginframework" "new_library" {
+	resource "databricks_library" "new_library" {
 		cluster_id = databricks_cluster.this.id
 		pypi {
 			repo = "https://pypi.org/dummy"
@@ -54,7 +54,7 @@ func TestAccLibraryUpdatePluginFramework(t *testing.T) {
 			"ResourceClass" = "SingleNode"
 		}
 	}
-	resource "databricks_library_pluginframework" "new_library" {
+	resource "databricks_library" "new_library" {
 		cluster_id = databricks_cluster.this.id
 		pypi {
 			repo = "https://pypi.org/simple"
@@ -80,7 +80,7 @@ func TestAccLibraryUpdatePluginFramework(t *testing.T) {
 			"ResourceClass" = "SingleNode"
 		}
 	}
-	resource "databricks_library_pluginframework" "new_library" {
+	resource "databricks_library" "new_library" {
 		cluster_id = databricks_cluster.this.id
 		pypi {
 			package = "networkx"
diff --git a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go
index c0047d55cc..b20f08deed 100644
--- a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go
+++ b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go
@@ -65,7 +65,7 @@ type QualityMonitorResource struct {
 }
 
 func (r *QualityMonitorResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(resourceName)
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
 }
 
 func (r *QualityMonitorResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
diff --git a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go
index 7f303d482e..0d0c49b2c0 100644
--- a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go
+++ b/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go
@@ -55,7 +55,7 @@ func TestUcAccQualityMonitor(t *testing.T) {
 
 	acceptance.UnityWorkspaceLevel(t, acceptance.Step{
 		Template: commonPartQualityMonitoring + `
-		resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
+		resource "databricks_quality_monitor" "testMonitorInference" {
 			table_name = databricks_sql_table.myInferenceTable.id
 			assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
 			output_schema_name = databricks_schema.things.id
@@ -81,7 +81,7 @@ func TestUcAccQualityMonitor(t *testing.T) {
 			}
 		}
 
-		resource "databricks_quality_monitor_pluginframework" "testMonitorTimeseries" {
+		resource "databricks_quality_monitor" "testMonitorTimeseries" {
 			table_name = databricks_sql_table.myTimeseries.id
 			assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myTimeseries.name}"
 			output_schema_name = databricks_schema.things.id
@@ -104,7 +104,7 @@ func TestUcAccQualityMonitor(t *testing.T) {
 			}
 		}
 
-		resource "databricks_quality_monitor_pluginframework" "testMonitorSnapshot" {
+		resource "databricks_quality_monitor" "testMonitorSnapshot" {
 			table_name = databricks_sql_table.mySnapshot.id
 			assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myTimeseries.name}"
 			output_schema_name = databricks_schema.things.id
@@ -121,7 +121,7 @@ func TestUcAccUpdateQualityMonitor(t *testing.T) {
 	}
 	acceptance.UnityWorkspaceLevel(t, acceptance.Step{
 		Template: commonPartQualityMonitoring + `
-		resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
+		resource "databricks_quality_monitor" "testMonitorInference" {
 			table_name = databricks_sql_table.myInferenceTable.id
 			assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
 			output_schema_name = databricks_schema.things.id
@@ -136,7 +136,7 @@ func TestUcAccUpdateQualityMonitor(t *testing.T) {
 		`,
 	}, acceptance.Step{
 		Template: commonPartQualityMonitoring + `
-		resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
+		resource "databricks_quality_monitor" "testMonitorInference" {
 			table_name = databricks_sql_table.myInferenceTable.id
 			assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
 			output_schema_name = databricks_schema.things.id
@@ -160,7 +160,7 @@ func TestUcAccQualityMonitorImportPluginFramework(t *testing.T) {
 
 		acceptance.Step{
 			Template: commonPartQualityMonitoring + `
-			resource "databricks_quality_monitor_pluginframework" "testMonitorInference" {
+			resource "databricks_quality_monitor" "testMonitorInference" {
 				table_name = databricks_sql_table.myInferenceTable.id
 				assets_dir = "/Shared/provider-test/databricks_quality_monitoring/${databricks_sql_table.myInferenceTable.name}"
 				output_schema_name = databricks_schema.things.id
@@ -176,8 +176,8 @@ func TestUcAccQualityMonitorImportPluginFramework(t *testing.T) {
 		},
 		acceptance.Step{
 			ImportState: true,
-			ResourceName: "databricks_quality_monitor_pluginframework.testMonitorInference",
-			ImportStateIdFunc: acceptance.BuildImportStateIdFunc("databricks_quality_monitor_pluginframework.testMonitorInference", "table_name"),
+			ResourceName: "databricks_quality_monitor.testMonitorInference",
+			ImportStateIdFunc: acceptance.BuildImportStateIdFunc("databricks_quality_monitor.testMonitorInference", "table_name"),
 			ImportStateVerify: true,
 			ImportStateVerifyIdentifierAttribute: "table_name",
 		},
diff --git a/internal/providers/pluginfw/resources/volume/data_volumes.go b/internal/providers/pluginfw/resources/volume/data_volumes.go
index 54eccf7bde..6a4af53ba0 100644
--- a/internal/providers/pluginfw/resources/volume/data_volumes.go
+++ b/internal/providers/pluginfw/resources/volume/data_volumes.go
@@ -35,7 +35,7 @@ type VolumesList struct {
 }
 
 func (d *VolumesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceName)
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
 }
 
 func (d *VolumesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
diff --git a/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go b/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go
index 0fdfc8aa50..3416d20f26 100644
--- a/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go
+++ b/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go
@@ -12,8 +12,8 @@ import (
 
 func checkDataSourceVolumesPopulated(t *testing.T) func(s *terraform.State) error {
 	return func(s *terraform.State) error {
-		_, ok := s.Modules[0].Resources["data.databricks_volumes_pluginframework.this"]
-		require.True(t, ok, "data.databricks_volumes_pluginframework.this has to be there")
+		_, ok := s.Modules[0].Resources["data.databricks_volumes.this"]
+		require.True(t, ok, "data.databricks_volumes.this has to be there")
 		num_volumes, _ := strconv.Atoi(s.Modules[0].Outputs["volumes"].Value.(string))
 		assert.GreaterOrEqual(t, num_volumes, 1)
 		return nil
@@ -45,13 +45,13 @@ func TestUcAccDataSourceVolumes(t *testing.T) {
 			schema_name = databricks_schema.things.name
 			volume_type = "MANAGED"
 		}
-		data "databricks_volumes_pluginframework" "this" {
+		data "databricks_volumes" "this" {
 			catalog_name = databricks_catalog.sandbox.name
 			schema_name = databricks_schema.things.name
 			depends_on = [ databricks_volume.this ]
 		}
 		output "volumes" {
-			value = length(data.databricks_volumes_pluginframework.this.ids)
+			value = length(data.databricks_volumes.this.ids)
 		}
 		`,
 		Check: checkDataSourceVolumesPopulated(t),
diff --git a/internal/providers/providers.go b/internal/providers/providers.go
index 681df6f74d..c9ca759bc1 100644
--- a/internal/providers/providers.go
+++ b/internal/providers/providers.go
@@ -60,7 +60,7 @@ func GetProviderServer(ctx context.Context, options ...ServerOption) (tfprotov6.
 	}
 	sdkPluginProvider := serverOptions.sdkV2Provider
 	if sdkPluginProvider == nil {
-		sdkPluginProvider = sdkv2.DatabricksProvider()
+		sdkPluginProvider = sdkv2.DatabricksProvider(ctx)
 	}
 	pluginFrameworkProvider := serverOptions.pluginFrameworkProvider
 	if pluginFrameworkProvider == nil {
@@ -68,7 +68,7 @@ func GetProviderServer(ctx context.Context, options ...ServerOption) (tfprotov6.
 	}
 	upgradedSdkPluginProvider, err := tf5to6server.UpgradeServer(
-		context.Background(),
+		ctx,
 		sdkPluginProvider.GRPCProvider,
 	)
 	if err != nil {
diff --git a/internal/providers/providers_test_utils.go b/internal/providers/providers_test_utils.go
index a3e9cfdd51..c9309563bd 100644
--- a/internal/providers/providers_test_utils.go
+++ b/internal/providers/providers_test_utils.go
@@ -141,7 +141,7 @@ func (pf providerFixture) configureProviderAndReturnClient_SDKv2(t *testing.T) (
 	for k, v := range pf.env {
 		t.Setenv(k, v)
 	}
-	p := sdkv2.DatabricksProvider()
+	p := sdkv2.DatabricksProvider(context.Background())
 	ctx := context.Background()
 	diags := p.Configure(ctx, terraform.NewResourceConfigRaw(pf.rawConfigSDKv2()))
 	if len(diags) > 0 {
diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go
index 8136901ddf..e37593246c 100644
--- a/internal/providers/sdkv2/sdkv2.go
+++ b/internal/providers/sdkv2/sdkv2.go
@@ -32,6 +32,7 @@ import (
 	"github.com/databricks/terraform-provider-databricks/dashboards"
 	"github.com/databricks/terraform-provider-databricks/finops"
 	providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw"
 	"github.com/databricks/terraform-provider-databricks/jobs"
 	"github.com/databricks/terraform-provider-databricks/logger"
 	"github.com/databricks/terraform-provider-databricks/mlflow"
@@ -71,161 +72,174 @@ func init() {
 }
 
 // DatabricksProvider returns the entire terraform provider object
-func DatabricksProvider() *schema.Provider {
+func DatabricksProvider(ctx context.Context) *schema.Provider {
+	dataSourceMap := map[string]*schema.Resource{ // must be in alphabetical order
+		"databricks_aws_crossaccount_policy":               aws.DataAwsCrossaccountPolicy().ToResource(),
+		"databricks_aws_assume_role_policy":                aws.DataAwsAssumeRolePolicy().ToResource(),
+		"databricks_aws_bucket_policy":                     aws.DataAwsBucketPolicy().ToResource(),
+		"databricks_aws_unity_catalog_assume_role_policy":  aws.DataAwsUnityCatalogAssumeRolePolicy().ToResource(),
+		"databricks_aws_unity_catalog_policy":              aws.DataAwsUnityCatalogPolicy().ToResource(),
+		"databricks_cluster":                               clusters.DataSourceCluster().ToResource(),
+		"databricks_clusters":                              clusters.DataSourceClusters().ToResource(),
+		"databricks_cluster_policy":                        policies.DataSourceClusterPolicy().ToResource(),
+		"databricks_catalog":                               catalog.DataSourceCatalog().ToResource(),
+		"databricks_catalogs":                              catalog.DataSourceCatalogs().ToResource(),
+		"databricks_current_config":                        mws.DataSourceCurrentConfiguration().ToResource(),
+		"databricks_current_metastore":                     catalog.DataSourceCurrentMetastore().ToResource(),
+		"databricks_current_user":                          scim.DataSourceCurrentUser().ToResource(),
+		"databricks_dbfs_file":                             storage.DataSourceDbfsFile().ToResource(),
+		"databricks_dbfs_file_paths":                       storage.DataSourceDbfsFilePaths().ToResource(),
+		"databricks_directory":                             workspace.DataSourceDirectory().ToResource(),
+		"databricks_external_location":                     catalog.DataSourceExternalLocation().ToResource(),
+		"databricks_external_locations":                    catalog.DataSourceExternalLocations().ToResource(),
+		"databricks_group":                                 scim.DataSourceGroup().ToResource(),
+		"databricks_instance_pool":                         pools.DataSourceInstancePool().ToResource(),
+		"databricks_instance_profiles":                     aws.DataSourceInstanceProfiles().ToResource(),
+		"databricks_jobs":                                  jobs.DataSourceJobs().ToResource(),
+		"databricks_job":                                   jobs.DataSourceJob().ToResource(),
+		"databricks_metastore":                             catalog.DataSourceMetastore().ToResource(),
+		"databricks_metastores":                            catalog.DataSourceMetastores().ToResource(),
+		"databricks_mlflow_experiment":                     mlflow.DataSourceExperiment().ToResource(),
+		"databricks_mlflow_model":                          mlflow.DataSourceModel().ToResource(),
+		"databricks_mlflow_models":                         mlflow.DataSourceModels().ToResource(),
+		"databricks_mws_credentials":                       mws.DataSourceMwsCredentials().ToResource(),
+		"databricks_mws_workspaces":                        mws.DataSourceMwsWorkspaces().ToResource(),
+		"databricks_node_type":                             clusters.DataSourceNodeType().ToResource(),
+		"databricks_notebook":                              workspace.DataSourceNotebook().ToResource(),
+		"databricks_notebook_paths":                        workspace.DataSourceNotebookPaths().ToResource(),
+		"databricks_pipelines":                             pipelines.DataSourcePipelines().ToResource(),
+		"databricks_schema":                                catalog.DataSourceSchema().ToResource(),
+		"databricks_schemas":                               catalog.DataSourceSchemas().ToResource(),
+		"databricks_service_principal":                     scim.DataSourceServicePrincipal().ToResource(),
+		"databricks_service_principals":                    scim.DataSourceServicePrincipals().ToResource(),
+		"databricks_share":                                 sharing.DataSourceShare().ToResource(),
+		"databricks_shares":                                sharing.DataSourceShares().ToResource(),
+		"databricks_spark_version":                         clusters.DataSourceSparkVersion().ToResource(),
+		"databricks_sql_warehouse":                         sql.DataSourceWarehouse().ToResource(),
+		"databricks_sql_warehouses":                        sql.DataSourceWarehouses().ToResource(),
+		"databricks_storage_credential":                    catalog.DataSourceStorageCredential().ToResource(),
+		"databricks_storage_credentials":                   catalog.DataSourceStorageCredentials().ToResource(),
+		"databricks_table":                                 catalog.DataSourceTable().ToResource(),
+		"databricks_tables":                                catalog.DataSourceTables().ToResource(),
+		"databricks_views":                                 catalog.DataSourceViews().ToResource(),
+		"databricks_volume":                                catalog.DataSourceVolume().ToResource(),
+		"databricks_volumes":                               catalog.DataSourceVolumes().ToResource(),
+		"databricks_user":                                  scim.DataSourceUser().ToResource(),
+		"databricks_zones":                                 clusters.DataSourceClusterZones().ToResource(),
+	}
+
+	resourceMap := map[string]*schema.Resource{ // must be in alphabetical order
+		"databricks_access_control_rule_set":         permissions.ResourceAccessControlRuleSet().ToResource(),
+		"databricks_alert":                           sql.ResourceAlert().ToResource(),
+		"databricks_artifact_allowlist":              catalog.ResourceArtifactAllowlist().ToResource(),
+		"databricks_aws_s3_mount":                    storage.ResourceAWSS3Mount().ToResource(),
+		"databricks_azure_adls_gen1_mount":           storage.ResourceAzureAdlsGen1Mount().ToResource(),
+		"databricks_azure_adls_gen2_mount":           storage.ResourceAzureAdlsGen2Mount().ToResource(),
+		"databricks_azure_blob_mount":                storage.ResourceAzureBlobMount().ToResource(),
+		"databricks_budget":                          finops.ResourceBudget().ToResource(),
+		"databricks_catalog":                         catalog.ResourceCatalog().ToResource(),
+		"databricks_catalog_workspace_binding":       catalog.ResourceCatalogWorkspaceBinding().ToResource(),
+		"databricks_custom_app_integration":          apps.ResourceCustomAppIntegration().ToResource(),
+		"databricks_connection":                      catalog.ResourceConnection().ToResource(),
+		"databricks_cluster":                         clusters.ResourceCluster().ToResource(),
+		"databricks_cluster_policy":                  policies.ResourceClusterPolicy().ToResource(),
+		"databricks_dashboard":                       dashboards.ResourceDashboard().ToResource(),
+		"databricks_dbfs_file":                       storage.ResourceDbfsFile().ToResource(),
+		"databricks_directory":                       workspace.ResourceDirectory().ToResource(),
+		"databricks_entitlements":                    scim.ResourceEntitlements().ToResource(),
+		"databricks_external_location":               catalog.ResourceExternalLocation().ToResource(),
+		"databricks_file":                            storage.ResourceFile().ToResource(),
+		"databricks_git_credential":                  repos.ResourceGitCredential().ToResource(),
+		"databricks_global_init_script":              workspace.ResourceGlobalInitScript().ToResource(),
+		"databricks_grant":                           catalog.ResourceGrant().ToResource(),
+		"databricks_grants":                          catalog.ResourceGrants().ToResource(),
+		"databricks_group":                           scim.ResourceGroup().ToResource(),
+		"databricks_group_instance_profile":          aws.ResourceGroupInstanceProfile().ToResource(),
+		"databricks_group_member":                    scim.ResourceGroupMember().ToResource(),
+		"databricks_group_role":                      scim.ResourceGroupRole().ToResource(),
+		"databricks_instance_pool":                   pools.ResourceInstancePool().ToResource(),
+		"databricks_instance_profile":                aws.ResourceInstanceProfile().ToResource(),
+		"databricks_ip_access_list":                  access.ResourceIPAccessList().ToResource(),
+		"databricks_job":                             jobs.ResourceJob().ToResource(),
+		"databricks_lakehouse_monitor":               catalog.ResourceLakehouseMonitor().ToResource(),
+		"databricks_library":                         clusters.ResourceLibrary().ToResource(),
+		"databricks_metastore":                       catalog.ResourceMetastore().ToResource(),
+		"databricks_metastore_assignment":            catalog.ResourceMetastoreAssignment().ToResource(),
+		"databricks_metastore_data_access":           catalog.ResourceMetastoreDataAccess().ToResource(),
+		"databricks_mlflow_experiment":               mlflow.ResourceMlflowExperiment().ToResource(),
+		"databricks_mlflow_model":                    mlflow.ResourceMlflowModel().ToResource(),
+		"databricks_mlflow_webhook":                  mlflow.ResourceMlflowWebhook().ToResource(),
+		"databricks_model_serving":                   serving.ResourceModelServing().ToResource(),
+		"databricks_mount":                           storage.ResourceMount().ToResource(),
+		"databricks_mws_customer_managed_keys":       mws.ResourceMwsCustomerManagedKeys().ToResource(),
+		"databricks_mws_credentials":                 mws.ResourceMwsCredentials().ToResource(),
+		"databricks_mws_log_delivery":                mws.ResourceMwsLogDelivery().ToResource(),
+		"databricks_mws_ncc_binding":                 mws.ResourceMwsNccBinding().ToResource(),
+		"databricks_mws_ncc_private_endpoint_rule":   mws.ResourceMwsNccPrivateEndpointRule().ToResource(),
+		"databricks_mws_networks":                    mws.ResourceMwsNetworks().ToResource(),
+		"databricks_mws_network_connectivity_config": mws.ResourceMwsNetworkConnectivityConfig().ToResource(),
+		"databricks_mws_permission_assignment":       mws.ResourceMwsPermissionAssignment().ToResource(),
+		"databricks_mws_private_access_settings":     mws.ResourceMwsPrivateAccessSettings().ToResource(),
+		"databricks_mws_storage_configurations":      mws.ResourceMwsStorageConfigurations().ToResource(),
+		"databricks_mws_vpc_endpoint":                mws.ResourceMwsVpcEndpoint().ToResource(),
+		"databricks_mws_workspaces":                  mws.ResourceMwsWorkspaces().ToResource(),
+		"databricks_notebook":                        workspace.ResourceNotebook().ToResource(),
+		"databricks_notification_destination":        settings.ResourceNotificationDestination().ToResource(),
+		"databricks_obo_token":                       tokens.ResourceOboToken().ToResource(),
+		"databricks_online_table":                    catalog.ResourceOnlineTable().ToResource(),
+		"databricks_permission_assignment":           access.ResourcePermissionAssignment().ToResource(),
+		"databricks_permissions":                     permissions.ResourcePermissions().ToResource(),
+		"databricks_pipeline":                        pipelines.ResourcePipeline().ToResource(),
+		"databricks_provider":                        sharing.ResourceProvider().ToResource(),
+		"databricks_quality_monitor":                 catalog.ResourceQualityMonitor().ToResource(),
+		"databricks_recipient":                       sharing.ResourceRecipient().ToResource(),
+		"databricks_registered_model":                catalog.ResourceRegisteredModel().ToResource(),
+		"databricks_repo":                            repos.ResourceRepo().ToResource(),
+		"databricks_schema":                          catalog.ResourceSchema().ToResource(),
+		"databricks_secret":                          secrets.ResourceSecret().ToResource(),
+		"databricks_secret_scope":                    secrets.ResourceSecretScope().ToResource(),
+		"databricks_secret_acl":                      secrets.ResourceSecretACL().ToResource(),
+		"databricks_service_principal":               scim.ResourceServicePrincipal().ToResource(),
+		"databricks_service_principal_role":          aws.ResourceServicePrincipalRole().ToResource(),
+		"databricks_service_principal_secret":        tokens.ResourceServicePrincipalSecret().ToResource(),
+		"databricks_share":                           sharing.ResourceShare().ToResource(),
+		"databricks_sql_dashboard":                   sql.ResourceSqlDashboard().ToResource(),
+		"databricks_sql_endpoint":                    sql.ResourceSqlEndpoint().ToResource(),
+		"databricks_sql_global_config":               sql.ResourceSqlGlobalConfig().ToResource(),
+		"databricks_sql_permissions":                 access.ResourceSqlPermissions().ToResource(),
+		"databricks_sql_query":                       sql.ResourceSqlQuery().ToResource(),
+		"databricks_sql_alert":                       sql.ResourceSqlAlert().ToResource(),
+		"databricks_sql_table":                       catalog.ResourceSqlTable().ToResource(),
+		"databricks_sql_visualization":               sql.ResourceSqlVisualization().ToResource(),
+		"databricks_sql_widget":                      sql.ResourceSqlWidget().ToResource(),
+		"databricks_storage_credential":              catalog.ResourceStorageCredential().ToResource(),
+		"databricks_system_schema":                   catalog.ResourceSystemSchema().ToResource(),
+		"databricks_table":                           catalog.ResourceTable().ToResource(),
+		"databricks_token":                           tokens.ResourceToken().ToResource(),
+		"databricks_user":                            scim.ResourceUser().ToResource(),
+		"databricks_user_instance_profile":           aws.ResourceUserInstanceProfile().ToResource(),
+		"databricks_user_role":                       aws.ResourceUserRole().ToResource(),
+		"databricks_vector_search_endpoint":          vectorsearch.ResourceVectorSearchEndpoint().ToResource(),
+		"databricks_vector_search_index":             vectorsearch.ResourceVectorSearchIndex().ToResource(),
+		"databricks_volume":                          catalog.ResourceVolume().ToResource(),
+		"databricks_workspace_binding":               catalog.ResourceWorkspaceBinding().ToResource(),
+		"databricks_workspace_conf":                  workspace.ResourceWorkspaceConf().ToResource(),
+		"databricks_workspace_file":                  workspace.ResourceWorkspaceFile().ToResource(),
+	}
+
+	// Remove the resources and data sources that are being migrated to the plugin framework
+	for _, dataSourceToRemove := range pluginfw.GetSdkV2DataSourcesToRemove(ctx) {
+		delete(dataSourceMap, dataSourceToRemove)
+	}
+
+	for _, resourceToRemove := range pluginfw.GetSdkV2ResourcesToRemove(ctx) {
+		delete(resourceMap, resourceToRemove)
+	}
+
 	p := &schema.Provider{
-		DataSourcesMap: map[string]*schema.Resource{ // must be in alphabetical order
-			"databricks_aws_crossaccount_policy":              aws.DataAwsCrossaccountPolicy().ToResource(),
-			"databricks_aws_assume_role_policy":               aws.DataAwsAssumeRolePolicy().ToResource(),
-			"databricks_aws_bucket_policy":                    aws.DataAwsBucketPolicy().ToResource(),
-			"databricks_aws_unity_catalog_assume_role_policy": aws.DataAwsUnityCatalogAssumeRolePolicy().ToResource(),
-			"databricks_aws_unity_catalog_policy":             aws.DataAwsUnityCatalogPolicy().ToResource(),
-			"databricks_cluster":                              clusters.DataSourceCluster().ToResource(),
-			"databricks_clusters":                             clusters.DataSourceClusters().ToResource(),
-			"databricks_cluster_policy":                       policies.DataSourceClusterPolicy().ToResource(),
-			"databricks_catalog":                              catalog.DataSourceCatalog().ToResource(),
-			"databricks_catalogs":                             catalog.DataSourceCatalogs().ToResource(),
-			"databricks_current_config":                       mws.DataSourceCurrentConfiguration().ToResource(),
-			"databricks_current_metastore":                    catalog.DataSourceCurrentMetastore().ToResource(),
-			"databricks_current_user":                         scim.DataSourceCurrentUser().ToResource(),
-			"databricks_dbfs_file":                            storage.DataSourceDbfsFile().ToResource(),
-			"databricks_dbfs_file_paths":                      storage.DataSourceDbfsFilePaths().ToResource(),
-			"databricks_directory":                            workspace.DataSourceDirectory().ToResource(),
-			"databricks_external_location":                    catalog.DataSourceExternalLocation().ToResource(),
-			"databricks_external_locations":                   catalog.DataSourceExternalLocations().ToResource(),
-			"databricks_group":                                scim.DataSourceGroup().ToResource(),
-			"databricks_instance_pool":                        pools.DataSourceInstancePool().ToResource(),
-			"databricks_instance_profiles":                    aws.DataSourceInstanceProfiles().ToResource(),
-			"databricks_jobs":                                 jobs.DataSourceJobs().ToResource(),
-			"databricks_job":                                  jobs.DataSourceJob().ToResource(),
-			"databricks_metastore":                            catalog.DataSourceMetastore().ToResource(),
-			"databricks_metastores":                           catalog.DataSourceMetastores().ToResource(),
-			"databricks_mlflow_experiment":                    mlflow.DataSourceExperiment().ToResource(),
-			"databricks_mlflow_model":                         mlflow.DataSourceModel().ToResource(),
-			"databricks_mlflow_models":                        mlflow.DataSourceModels().ToResource(),
-			"databricks_mws_credentials":                      mws.DataSourceMwsCredentials().ToResource(),
-			"databricks_mws_workspaces":                       mws.DataSourceMwsWorkspaces().ToResource(),
-			"databricks_node_type":                            clusters.DataSourceNodeType().ToResource(),
-			"databricks_notebook":                             workspace.DataSourceNotebook().ToResource(),
-			"databricks_notebook_paths":                       workspace.DataSourceNotebookPaths().ToResource(),
-			"databricks_pipelines":                            pipelines.DataSourcePipelines().ToResource(),
-			"databricks_schema":                               catalog.DataSourceSchema().ToResource(),
-			"databricks_schemas":                              catalog.DataSourceSchemas().ToResource(),
-			"databricks_service_principal":                    scim.DataSourceServicePrincipal().ToResource(),
-			"databricks_service_principals":                   scim.DataSourceServicePrincipals().ToResource(),
-			"databricks_share":                                sharing.DataSourceShare().ToResource(),
-			"databricks_shares":                               sharing.DataSourceShares().ToResource(),
-			"databricks_spark_version":                        clusters.DataSourceSparkVersion().ToResource(),
-			"databricks_sql_warehouse":                        sql.DataSourceWarehouse().ToResource(),
-			"databricks_sql_warehouses":                       sql.DataSourceWarehouses().ToResource(),
-			"databricks_storage_credential":                   catalog.DataSourceStorageCredential().ToResource(),
-			"databricks_storage_credentials":                  catalog.DataSourceStorageCredentials().ToResource(),
-			"databricks_table":                                catalog.DataSourceTable().ToResource(),
-			"databricks_tables":                               catalog.DataSourceTables().ToResource(),
-			"databricks_views":                                catalog.DataSourceViews().ToResource(),
-			"databricks_volume":                               catalog.DataSourceVolume().ToResource(),
-			"databricks_volumes":                              catalog.DataSourceVolumes().ToResource(),
-			"databricks_user":                                 scim.DataSourceUser().ToResource(),
-			"databricks_zones":                                clusters.DataSourceClusterZones().ToResource(),
-		},
-		ResourcesMap: map[string]*schema.Resource{ // must be in alphabetical order
-			"databricks_access_control_rule_set":         permissions.ResourceAccessControlRuleSet().ToResource(),
-			"databricks_alert":                           sql.ResourceAlert().ToResource(),
-			"databricks_artifact_allowlist":              catalog.ResourceArtifactAllowlist().ToResource(),
-			"databricks_aws_s3_mount":                    storage.ResourceAWSS3Mount().ToResource(),
-			"databricks_azure_adls_gen1_mount":           storage.ResourceAzureAdlsGen1Mount().ToResource(),
-			"databricks_azure_adls_gen2_mount":           storage.ResourceAzureAdlsGen2Mount().ToResource(),
-			"databricks_azure_blob_mount":                storage.ResourceAzureBlobMount().ToResource(),
-			"databricks_budget":                          finops.ResourceBudget().ToResource(),
-			"databricks_catalog":                         catalog.ResourceCatalog().ToResource(),
-			"databricks_catalog_workspace_binding":       catalog.ResourceCatalogWorkspaceBinding().ToResource(),
-			"databricks_custom_app_integration":          apps.ResourceCustomAppIntegration().ToResource(),
-			"databricks_connection":                      catalog.ResourceConnection().ToResource(),
-			"databricks_cluster":                         clusters.ResourceCluster().ToResource(),
-			"databricks_cluster_policy":                  policies.ResourceClusterPolicy().ToResource(),
-			"databricks_dashboard":                       dashboards.ResourceDashboard().ToResource(),
-			"databricks_dbfs_file":                       storage.ResourceDbfsFile().ToResource(),
-			"databricks_directory":                       workspace.ResourceDirectory().ToResource(),
-			"databricks_entitlements":                    scim.ResourceEntitlements().ToResource(),
-			"databricks_external_location":               catalog.ResourceExternalLocation().ToResource(),
-			"databricks_file":                            storage.ResourceFile().ToResource(),
-			"databricks_git_credential":                  repos.ResourceGitCredential().ToResource(),
-			"databricks_global_init_script":              workspace.ResourceGlobalInitScript().ToResource(),
-			"databricks_grant":                           catalog.ResourceGrant().ToResource(),
-			"databricks_grants":                          catalog.ResourceGrants().ToResource(),
-			"databricks_group":                           scim.ResourceGroup().ToResource(),
-			"databricks_group_instance_profile":          aws.ResourceGroupInstanceProfile().ToResource(),
-			"databricks_group_member":                    scim.ResourceGroupMember().ToResource(),
-			"databricks_group_role":                      scim.ResourceGroupRole().ToResource(),
-			"databricks_instance_pool":                   pools.ResourceInstancePool().ToResource(),
-			"databricks_instance_profile":                aws.ResourceInstanceProfile().ToResource(),
-			"databricks_ip_access_list":                  access.ResourceIPAccessList().ToResource(),
-			"databricks_job":                             jobs.ResourceJob().ToResource(),
-			"databricks_lakehouse_monitor":               catalog.ResourceLakehouseMonitor().ToResource(),
-			"databricks_library":                         clusters.ResourceLibrary().ToResource(),
-			"databricks_metastore":                       catalog.ResourceMetastore().ToResource(),
-			"databricks_metastore_assignment":            catalog.ResourceMetastoreAssignment().ToResource(),
-			"databricks_metastore_data_access":           catalog.ResourceMetastoreDataAccess().ToResource(),
-			"databricks_mlflow_experiment":               mlflow.ResourceMlflowExperiment().ToResource(),
-			"databricks_mlflow_model":                    mlflow.ResourceMlflowModel().ToResource(),
-			"databricks_mlflow_webhook":                  mlflow.ResourceMlflowWebhook().ToResource(),
-			"databricks_model_serving":                   serving.ResourceModelServing().ToResource(),
-			"databricks_mount":                           storage.ResourceMount().ToResource(),
-			"databricks_mws_customer_managed_keys":       mws.ResourceMwsCustomerManagedKeys().ToResource(),
-			"databricks_mws_credentials":                 mws.ResourceMwsCredentials().ToResource(),
-			"databricks_mws_log_delivery":                mws.ResourceMwsLogDelivery().ToResource(),
-			"databricks_mws_ncc_binding":                 mws.ResourceMwsNccBinding().ToResource(),
-			"databricks_mws_ncc_private_endpoint_rule":   mws.ResourceMwsNccPrivateEndpointRule().ToResource(),
-			"databricks_mws_networks":                    mws.ResourceMwsNetworks().ToResource(),
-			"databricks_mws_network_connectivity_config": mws.ResourceMwsNetworkConnectivityConfig().ToResource(),
-			"databricks_mws_permission_assignment":       mws.ResourceMwsPermissionAssignment().ToResource(),
-			"databricks_mws_private_access_settings":     mws.ResourceMwsPrivateAccessSettings().ToResource(),
-			"databricks_mws_storage_configurations":      mws.ResourceMwsStorageConfigurations().ToResource(),
-			"databricks_mws_vpc_endpoint":                mws.ResourceMwsVpcEndpoint().ToResource(),
-			"databricks_mws_workspaces":                  mws.ResourceMwsWorkspaces().ToResource(),
-			"databricks_notebook":                        workspace.ResourceNotebook().ToResource(),
-			"databricks_notification_destination":        settings.ResourceNotificationDestination().ToResource(),
-			"databricks_obo_token":                       tokens.ResourceOboToken().ToResource(),
-			"databricks_online_table":                    catalog.ResourceOnlineTable().ToResource(),
-			"databricks_permission_assignment":           access.ResourcePermissionAssignment().ToResource(),
-			"databricks_permissions":                     permissions.ResourcePermissions().ToResource(),
-			"databricks_pipeline":                        pipelines.ResourcePipeline().ToResource(),
-			"databricks_provider":                        sharing.ResourceProvider().ToResource(),
-			"databricks_quality_monitor":                 catalog.ResourceQualityMonitor().ToResource(),
-			"databricks_recipient":                       sharing.ResourceRecipient().ToResource(),
-			"databricks_registered_model":                catalog.ResourceRegisteredModel().ToResource(),
-			"databricks_repo":                            repos.ResourceRepo().ToResource(),
-			"databricks_schema":                          catalog.ResourceSchema().ToResource(),
-			"databricks_secret":                          secrets.ResourceSecret().ToResource(),
-			"databricks_secret_scope":                    secrets.ResourceSecretScope().ToResource(),
-			"databricks_secret_acl":                      secrets.ResourceSecretACL().ToResource(),
-			"databricks_service_principal":               scim.ResourceServicePrincipal().ToResource(),
-			"databricks_service_principal_role":          aws.ResourceServicePrincipalRole().ToResource(),
-			"databricks_service_principal_secret":        tokens.ResourceServicePrincipalSecret().ToResource(),
-			"databricks_share":                           sharing.ResourceShare().ToResource(),
-			"databricks_sql_dashboard":                   sql.ResourceSqlDashboard().ToResource(),
-			"databricks_sql_endpoint":                    sql.ResourceSqlEndpoint().ToResource(),
-			"databricks_sql_global_config":               sql.ResourceSqlGlobalConfig().ToResource(),
-			"databricks_sql_permissions":                 access.ResourceSqlPermissions().ToResource(),
-			"databricks_sql_query":                       sql.ResourceSqlQuery().ToResource(),
-			"databricks_sql_alert":                       sql.ResourceSqlAlert().ToResource(),
-			"databricks_sql_table":                       catalog.ResourceSqlTable().ToResource(),
-			"databricks_sql_visualization":               sql.ResourceSqlVisualization().ToResource(),
-			"databricks_sql_widget":                      sql.ResourceSqlWidget().ToResource(),
-			"databricks_storage_credential":              catalog.ResourceStorageCredential().ToResource(),
-			"databricks_system_schema":                   catalog.ResourceSystemSchema().ToResource(),
-			"databricks_table":                           catalog.ResourceTable().ToResource(),
-			"databricks_token":                           tokens.ResourceToken().ToResource(),
-			"databricks_user":                            scim.ResourceUser().ToResource(),
-			"databricks_user_instance_profile":           aws.ResourceUserInstanceProfile().ToResource(),
-			"databricks_user_role":                       aws.ResourceUserRole().ToResource(),
-			"databricks_vector_search_endpoint":          vectorsearch.ResourceVectorSearchEndpoint().ToResource(),
-			"databricks_vector_search_index":             vectorsearch.ResourceVectorSearchIndex().ToResource(),
-			"databricks_volume":                          catalog.ResourceVolume().ToResource(),
-			"databricks_workspace_binding":               catalog.ResourceWorkspaceBinding().ToResource(),
-			"databricks_workspace_conf":                  workspace.ResourceWorkspaceConf().ToResource(),
-			"databricks_workspace_file":                  workspace.ResourceWorkspaceFile().ToResource(),
-		},
-		Schema: providerSchema(),
+		DataSourcesMap: dataSourceMap,
+		ResourcesMap:   resourceMap,
+		Schema:         providerSchema(),
 	}
 	for name, resource := range settings.AllSettingsResources() {
 		p.ResourcesMap[fmt.Sprintf("databricks_%s_setting", name)] = resource.ToResource()
diff --git a/internal/providers/sdkv2/tests/coverage_test.go b/internal/providers/sdkv2/tests/coverage_test.go
index a9fff7ff25..9308188bc7 100644
--- a/internal/providers/sdkv2/tests/coverage_test.go
+++ b/internal/providers/sdkv2/tests/coverage_test.go
@@ -1,6 +1,7 @@
 package tests
 
 import (
+	"context"
 	"fmt"
 	"io"
 	"io/fs"
@@ -144,7 +145,7 @@ func TestCoverageReport(t *testing.T) {
 	files, err := recursiveChildren("..")
 	assert.NoError(t, err)
 
-	p := sdkv2.DatabricksProvider()
+	p := sdkv2.DatabricksProvider(context.Background())
 
 	var cr CoverageReport
 	var longestResourceName, longestFieldName int
diff --git a/internal/providers/sdkv2/tests/generate_test.go b/internal/providers/sdkv2/tests/generate_test.go
index 4b12513670..ce6d420a50 100644
--- a/internal/providers/sdkv2/tests/generate_test.go
+++ b/internal/providers/sdkv2/tests/generate_test.go
@@ -1,6 +1,7 @@
 package tests
 
 import (
+	"context"
 	"fmt"
 	"go/ast"
 	"go/parser"
@@ -234,7 +235,7 @@ func TestGenerateTestCodeStubs(t *testing.T) {
 	t.Logf("Got %d unit tests in total. %v", len(funcs), resourceTestStub{})
 	t.Skip()
 
-	p := sdkv2.DatabricksProvider()
+	p := sdkv2.DatabricksProvider(context.Background())
 	for name, resource := range p.ResourcesMap {
 		if name != "databricks_group_instance_profile" {
 			continue