diff --git a/docs/guides/experimental-exporter.md b/docs/guides/experimental-exporter.md index 9bf5e594d..b3ea7048a 100644 --- a/docs/guides/experimental-exporter.md +++ b/docs/guides/experimental-exporter.md @@ -128,7 +128,7 @@ Services are just logical groups of resources used for filtering and organizatio * `policies` - **listing** [databricks_cluster_policy](../resources/cluster_policy). * `pools` - **listing** [instance pools](../resources/instance_pool.md). * `queries` - **listing** [databricks_query](../resources/query.md). -* `repos` - **listing** [databricks_repo](../resources/repo.md) +* `repos` - **listing** [databricks_repo](../resources/repo.md) (both classical Repos in `/Repos` and Git Folders in arbitrary locations). * `secrets` - **listing** [databricks_secret_scope](../resources/secret_scope.md) along with [keys](../resources/secret.md) and [ACLs](../resources/secret_acl.md). * `settings` - **listing** [databricks_notification_destination](../resources/notification_destination.md). * `sql-dashboards` - **listing** Legacy [databricks_sql_dashboard](../resources/sql_dashboard.md) along with associated [databricks_sql_widget](../resources/sql_widget.md) and [databricks_sql_visualization](../resources/sql_visualization.md). 
diff --git a/exporter/context.go b/exporter/context.go index bfba5d24f..86a8a080b 100644 --- a/exporter/context.go +++ b/exporter/context.go @@ -52,6 +52,11 @@ import ( type resourceChannel chan *resource +type gitInfoCacheEntry struct { + IsPresent bool + RepoId int64 +} + type importContext struct { // not modified/used only in single thread Module string @@ -139,6 +144,9 @@ type importContext struct { oldWorkspaceObjects []workspace.ObjectStatus oldWorkspaceObjectMapping map[int64]string + gitInfoCache map[string]gitInfoCacheEntry + gitInfoCacheMutex sync.RWMutex + builtInPolicies map[string]compute.PolicyFamily builtInPoliciesMutex sync.Mutex @@ -256,6 +264,7 @@ func newImportContext(c *common.DatabricksClient) *importContext { allWorkspaceObjects: []workspace.ObjectStatus{}, oldWorkspaceObjects: []workspace.ObjectStatus{}, oldWorkspaceObjectMapping: map[int64]string{}, + gitInfoCache: map[string]gitInfoCacheEntry{}, workspaceConfKeys: workspaceConfKeys, shImports: map[string]bool{}, notebooksFormat: "SOURCE", diff --git a/exporter/exporter_test.go b/exporter/exporter_test.go index ece51b896..b8cbd53ac 100644 --- a/exporter/exporter_test.go +++ b/exporter/exporter_test.go @@ -30,6 +30,7 @@ import ( "github.com/databricks/terraform-provider-databricks/clusters" "github.com/databricks/terraform-provider-databricks/commands" "github.com/databricks/terraform-provider-databricks/common" + "github.com/databricks/terraform-provider-databricks/internal/service/workspace_tf" "github.com/databricks/terraform-provider-databricks/jobs" "github.com/databricks/terraform-provider-databricks/qa" "github.com/databricks/terraform-provider-databricks/repos" @@ -288,7 +289,7 @@ var emptyConnections = qa.HTTPFixture{ var emptyRepos = qa.HTTPFixture{ Method: "GET", ReuseRequest: true, - Resource: "/api/2.0/repos?", + Resource: "/api/2.0/repos?path_prefix=%2FWorkspace", Response: repos.ReposListResponse{}, } @@ -830,6 +831,16 @@ func TestImportingClusters(t *testing.T) { 
meAdminFixture, noCurrentMetastoreAttached, emptyRepos, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FUsers%2Fuser%40domain.com%2Flibs%2Ftest.whl&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FUsers%2Fuser%40domain.com%2Frepo%2Ftest.sh&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, { Method: "GET", Resource: "/api/2.0/preview/scim/v2/Groups?", @@ -1494,6 +1505,11 @@ func TestImportingJobs_JobListMultiTask(t *testing.T) { }, }, }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2Ffoo%2Fbar.py&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, }, func(ctx context.Context, client *common.DatabricksClient) { ic := newImportContext(client) @@ -1743,7 +1759,7 @@ func TestImportingRepos(t *testing.T) { userReadFixture, { Method: "GET", - Resource: "/api/2.0/repos?", + Resource: "/api/2.0/repos?path_prefix=%2FWorkspace", Response: repos.ReposListResponse{ Repos: []repos.ReposInformation{ resp, @@ -2184,6 +2200,16 @@ func TestImportingDLTPipelines(t *testing.T) { Resource: "/api/2.0/permissions/files/789?", Response: getJSONObject("test-data/get-workspace-file-permissions.json"), }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FUsers%2Fuser%40domain.com%2FTest%20DLT&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2Finit.sh&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, }, func(ctx context.Context, client *common.DatabricksClient) { tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) @@ -2277,6 +2303,16 @@ func TestImportingDLTPipelinesMatchingOnly(t *testing.T) { Resource: "/api/2.0/instance-profiles/list", Response: getJSONObject("test-data/list-instance-profiles.json"), }, + { + Method: "GET", + Resource: 
"/api/2.0/workspace/get-status?path=%2FUsers%2Fuser%40domain.com%2FTest%20DLT&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2Finit.sh&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, }, func(ctx context.Context, client *common.DatabricksClient) { tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) @@ -2975,6 +3011,11 @@ func TestImportingLakeviewDashboards(t *testing.T) { WarehouseId: "1234", }, }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FDashboard1.lvdash.json&return_git_info=true", + Response: workspace_tf.ObjectInfo{}, + }, }, func(ctx context.Context, client *common.DatabricksClient) { tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) diff --git a/exporter/importables.go b/exporter/importables.go index a08cfc7c1..5465e1afa 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -323,6 +323,8 @@ var resourcesMap map[string]importable = map[string]importable{ MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, {Path: "init_scripts.workspace.destination", Resource: "databricks_repo", Match: "workspace_path", MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, + {Path: "init_scripts.workspace.destination", Resource: "databricks_repo", Match: "path", + MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, }, List: func(ic *importContext) error { clusters, err := clusters.NewClustersAPI(ic.Context, ic.Client).List() @@ -470,6 +472,8 @@ var resourcesMap map[string]importable = map[string]importable{ MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, {Path: "task.new_cluster.init_scripts.workspace.destination", Resource: "databricks_repo", Match: "workspace_path", MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, + {Path: "task.new_cluster.init_scripts.workspace.destination", 
Resource: "databricks_repo", Match: "path", + MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, {Path: "task.notebook_task.base_parameters", Resource: "databricks_repo", Match: "workspace_path", MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, {Path: "task.notebook_task.notebook_path", Resource: "databricks_repo", Match: "path", @@ -492,6 +496,8 @@ var resourcesMap map[string]importable = map[string]importable{ MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, {Path: "job_cluster.new_cluster.init_scripts.workspace.destination", Resource: "databricks_repo", Match: "workspace_path", MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, + {Path: "job_cluster.new_cluster.init_scripts.workspace.destination", Resource: "databricks_repo", Match: "path", + MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, }, Import: func(ic *importContext, r *resource) error { var job jobs.JobSettingsResource @@ -579,8 +585,8 @@ var resourcesMap map[string]importable = map[string]importable{ } if task.DbtTask.Source == "WORKSPACE" { directory := task.DbtTask.ProjectDirectory - if strings.HasPrefix(directory, "/Repos") { - ic.emitRepoByPath(directory) + if ic.isInRepoOrGitFolder(directory, true) { + ic.emitRepoOrGitFolder(directory, true) } else { // Traverse the dbt project directory and emit all objects found in it nbAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) @@ -1456,40 +1462,51 @@ var resourcesMap map[string]importable = map[string]importable{ return nameNormalizationRegex.ReplaceAllString(name[7:], "_") + "_" + d.Id() }, Search: func(ic *importContext, r *resource) error { - reposAPI := repos.NewReposAPI(ic.Context, ic.Client) - notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) - repoDir, err := notebooksAPI.Read(r.Value) + repoDir, err := ic.workspaceClient.Workspace.GetStatusByPath(ic.Context, r.Value) if err != nil 
{ return err } - repo, err := reposAPI.Read(fmt.Sprintf("%d", repoDir.ObjectID)) - if err != nil { - return err + if repoDir.ObjectType != sdk_workspace.ObjectTypeRepo { + return fmt.Errorf("object %s is not a repo", r.Value) + } + if repoDir.ResourceId != "" { + r.ID = repoDir.ResourceId + } else { + r.ID = strconv.FormatInt(repoDir.ObjectId, 10) } - r.ID = fmt.Sprintf("%d", repo.ID) return nil }, List: func(ic *importContext) error { - objList, err := repos.NewReposAPI(ic.Context, ic.Client).ListAll() - if err != nil { - return err - } - for offset, repo := range objList { + it := ic.workspaceClient.Repos.List(ic.Context, sdk_workspace.ListReposRequest{PathPrefix: "/Workspace"}) + i := 1 + for it.HasNext(ic.Context) { + repo, err := it.Next(ic.Context) + if err != nil { + return err + } if repo.Url != "" { ic.Emit(&resource{ Resource: "databricks_repo", - ID: fmt.Sprintf("%d", repo.ID), + ID: strconv.FormatInt(repo.Id, 10), }) } else { log.Printf("[WARN] ignoring databricks_repo without Git provider. Path: %s", repo.Path) ic.addIgnoredResource(fmt.Sprintf("databricks_repo. 
path=%s", repo.Path)) } - log.Printf("[INFO] Scanned %d of %d repos", offset+1, len(objList)) + if i%50 == 0 { + log.Printf("[INFO] Scanned %d repos", i) + } + i++ } return nil }, Import: func(ic *importContext, r *resource) error { - ic.emitUserOrServicePrincipalForPath(r.Data.Get("path").(string), "/Repos") + path := maybeStripWorkspacePrefix(r.Data.Get("path").(string)) + if strings.HasPrefix(path, "/Repos") { + ic.emitUserOrServicePrincipalForPath(path, "/Repos") + } else if strings.HasPrefix(path, "/Users") { + ic.emitUserOrServicePrincipalForPath(path, "/Users") + } ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/repos/%s", r.ID), "repo_"+ic.Importables["databricks_repo"].Name(ic, r.Data)) return nil @@ -1518,12 +1535,15 @@ var resourcesMap map[string]importable = map[string]importable{ } return shouldIgnore }, - Depends: []reference{ {Path: "path", Resource: "databricks_user", Match: "repos", MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, {Path: "path", Resource: "databricks_service_principal", Match: "repos", MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, + {Path: "path", Resource: "databricks_user", Match: "home", + MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, + {Path: "path", Resource: "databricks_service_principal", Match: "home", + MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, }, }, "databricks_workspace_conf": { @@ -2236,6 +2256,8 @@ var resourcesMap map[string]importable = map[string]importable{ MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, {Path: "cluster.init_scripts.workspace.destination", Resource: "databricks_repo", Match: "workspace_path", MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, + {Path: "cluster.init_scripts.workspace.destination", Resource: "databricks_repo", Match: "path", + MatchType: MatchPrefix, SearchValueTransformFunc: 
appendEndingSlashToDirName}, }, }, "databricks_directory": { @@ -3436,8 +3458,8 @@ var resourcesMap map[string]importable = map[string]importable{ }, Import: func(ic *importContext, r *resource) error { path := r.Data.Get("path").(string) - if strings.HasPrefix(path, "/Repos") { - ic.emitRepoByPath(path) + if ic.isInRepoOrGitFolder(path, false) { + ic.emitRepoOrGitFolder(path, false) return nil } parts := strings.Split(path, "/") @@ -3459,10 +3481,7 @@ var resourcesMap map[string]importable = map[string]importable{ "dashboard_"+ic.Importables["databricks_dashboard"].Name(ic, r.Data)) parentPath := r.Data.Get("parent_path").(string) if parentPath != "" && parentPath != "/" { - ic.Emit(&resource{ - Resource: "databricks_directory", - ID: parentPath, - }) + ic.emitDirectoryOrRepo(parentPath) } warehouseId := r.Data.Get("warehouse_id").(string) if warehouseId != "" { @@ -3478,7 +3497,7 @@ var resourcesMap map[string]importable = map[string]importable{ return pathString == "dashboard_change_detected" || shouldOmitMd5Field(ic, pathString, as, d) }, Ignore: func(ic *importContext, r *resource) bool { - return strings.HasPrefix(r.Data.Get("path").(string), "/Repos") || strings.HasPrefix(r.Data.Get("parent_path").(string), "/Repos") + return ic.isInRepoOrGitFolder(r.Data.Get("path").(string), false) || ic.isInRepoOrGitFolder(r.Data.Get("parent_path").(string), true) }, Depends: []reference{ {Path: "file_path", File: true}, diff --git a/exporter/importables_test.go b/exporter/importables_test.go index 34a25b88c..e735da5d4 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go @@ -58,6 +58,7 @@ func importContextForTest() *importContext { allSps: map[string]scim.User{}, channels: makeResourcesChannels(), oldWorkspaceObjectMapping: map[int64]string{}, + gitInfoCache: map[string]gitInfoCacheEntry{}, exportDeletedUsersAssets: false, ignoredResources: map[string]struct{}{}, deletedResources: map[string]struct{}{}, @@ -1525,29 +1526,67 @@ func 
TestEmitSqlParent(t *testing.T) { } func TestEmitFilesFromSlice(t *testing.T) { - ic := importContextForTest() - ic.enableServices("storage,notebooks,wsfiles") - ic.emitFilesFromSlice([]string{ - "dbfs:/FileStore/test.txt", - "/Workspace/Shared/test.txt", - "nothing", + qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FShared%2Ftest.txt&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FShared%2Fgit%2Ftest.txt&return_git_info=true", + Response: workspace.ObjectStatus{ + GitInfo: &sdk_workspace.RepoInfo{ + Id: 1234, + }, + }, + }, + }, func(ctx context.Context, client *common.DatabricksClient) { + ic := importContextForTestWithClient(ctx, client) + ic.enableServices("storage,notebooks,wsfiles,repos") + ic.emitFilesFromSlice([]string{ + "dbfs:/FileStore/test.txt", + "/Workspace/Shared/test.txt", + "/Workspace/Shared/git/test.txt", + "nothing", + }) + assert.Equal(t, 3, len(ic.testEmits)) + assert.Contains(t, ic.testEmits, "databricks_dbfs_file[] (id: dbfs:/FileStore/test.txt)") + assert.Contains(t, ic.testEmits, "databricks_workspace_file[] (id: /Shared/test.txt)") + assert.Contains(t, ic.testEmits, "databricks_repo[] (id: 1234)") }) - assert.Equal(t, 2, len(ic.testEmits)) - assert.Contains(t, ic.testEmits, "databricks_dbfs_file[] (id: dbfs:/FileStore/test.txt)") - assert.Contains(t, ic.testEmits, "databricks_workspace_file[] (id: /Shared/test.txt)") } func TestEmitFilesFromMap(t *testing.T) { - ic := importContextForTest() - ic.enableServices("storage,notebooks,wsfiles") - ic.emitFilesFromMap(map[string]string{ - "k1": "dbfs:/FileStore/test.txt", - "k2": "/Workspace/Shared/test.txt", - "k3": "nothing", - }) - assert.Equal(t, 2, len(ic.testEmits)) - assert.Contains(t, ic.testEmits, "databricks_dbfs_file[] (id: dbfs:/FileStore/test.txt)") - assert.Contains(t, ic.testEmits, "databricks_workspace_file[] (id: 
/Shared/test.txt)") + qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FShared%2Ftest.txt&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, + { + Method: "GET", + Resource: "/api/2.0/workspace/get-status?path=%2FShared%2Fgit%2Ftest.txt&return_git_info=true", + Response: workspace.ObjectStatus{ + GitInfo: &sdk_workspace.RepoInfo{ + Id: 1234, + }, + }, + }, + }, func(ctx context.Context, client *common.DatabricksClient) { + ic := importContextForTestWithClient(ctx, client) + ic.enableServices("storage,notebooks,wsfiles,repos") + ic.emitFilesFromMap(map[string]string{ + "k1": "dbfs:/FileStore/test.txt", + "k2": "/Workspace/Shared/test.txt", + "k3": "nothing", + "k4": "/Workspace/Shared/git/test.txt", + }) + assert.Equal(t, 3, len(ic.testEmits)) + assert.Contains(t, ic.testEmits, "databricks_dbfs_file[] (id: dbfs:/FileStore/test.txt)") + assert.Contains(t, ic.testEmits, "databricks_workspace_file[] (id: /Shared/test.txt)") + assert.Contains(t, ic.testEmits, "databricks_repo[] (id: 1234)") + }) } func TestStorageCredentialListFails(t *testing.T) { diff --git a/exporter/util.go b/exporter/util.go index 5e4f53dca..44e7d523a 100644 --- a/exporter/util.go +++ b/exporter/util.go @@ -83,9 +83,8 @@ func (ic *importContext) emitIfDbfsFile(path string) { } func (ic *importContext) emitIfWsfsFile(path string) { - if strings.HasPrefix(path, "/Workspace/") { - normalPath := strings.TrimPrefix(path, "/Workspace") - ic.emitWorkspaceFileOrRepo(normalPath) + if hasWorkspacePrefix(path) { + ic.emitWorkspaceFileOrRepo(maybeStripWorkspacePrefix(path)) } } diff --git a/exporter/util_test.go b/exporter/util_test.go index 912baa78b..e00144019 100644 --- a/exporter/util_test.go +++ b/exporter/util_test.go @@ -225,6 +225,11 @@ func TestEmitNotebookOrRepo(t *testing.T) { userListIdUsernameFixture2, userListFixture, userReadFixture, + { + Method: "GET", + Resource: 
"/api/2.0/workspace/get-status?path=%2FUsers%2Fuser%40domain.com%2Fabc&return_git_info=true", + Response: workspace.ObjectStatus{}, + }, }, func(ctx context.Context, client *common.DatabricksClient) { ic := importContextForTestWithClient(ctx, client) ic.enableServices("notebooks") diff --git a/exporter/util_workspace.go b/exporter/util_workspace.go index 8dcbefbaf..f8c0c371f 100644 --- a/exporter/util_workspace.go +++ b/exporter/util_workspace.go @@ -29,6 +29,7 @@ func (ic *importContext) emitRepoByPath(path string) { // Path to Repos objects consits of following parts: /Repos, folder, repository, path inside Repo. // Because it starts with `/`, it will produce empty string as first element in the slice. // And we're stopping splitting to avoid producing too many not necessary parts, so we have 5 parts only. + path = maybeStripWorkspacePrefix(path) parts := strings.SplitN(path, "/", 5) if len(parts) >= 4 { ic.Emit(&resource{ @@ -42,20 +43,91 @@ func (ic *importContext) emitRepoByPath(path string) { } func isRepoPath(path string) bool { + // TODO: call GetStatus if we don't have /Repos prefix? Or add a separate function to check if we have GitFolder? 
return strings.HasPrefix(path, "/Repos") || strings.HasPrefix(path, "/Workspace/Repos") } -func maybeStringWorkspacePrefix(path string) string { - if strings.HasPrefix(path, "/Workspace/") { +func hasWorkspacePrefix(path string) bool { + return strings.HasPrefix(path, "/Workspace/") +} + +func maybeStripWorkspacePrefix(path string) string { + if hasWorkspacePrefix(path) { return path[10:] } return path } +func (ic *importContext) isInRepoOrGitFolder(path string, isDirectory bool) bool { + if isRepoPath(path) { + return true + } + isInGit, _ := ic.isInGitFolder(path, isDirectory) + return isInGit +} + +func (ic *importContext) emitRepoOrGitFolder(path string, isDirectory bool) { + if isRepoPath(path) { + ic.emitRepoByPath(path) + } else if ok, repoId := ic.isInGitFolder(path, isDirectory); ok { + ic.Emit(&resource{ + Resource: "databricks_repo", + ID: strconv.FormatInt(repoId, 10), + }) + } else { + log.Printf("[WARN] can't find a repository for %s", path) + } +} + +func (ic *importContext) isInGitFolder(path string, isDirectory bool) (bool, int64) { + if path == "" || path == "/" { + return false, 0 + } + ic.gitInfoCacheMutex.RLock() + entry, ok := ic.gitInfoCache[path] + ic.gitInfoCacheMutex.RUnlock() + if ok { + log.Printf("[TRACE] GitInfo cache hit for %s: %v", path, entry) + return entry.IsPresent, entry.RepoId + } + isPresent := false + repoId := int64(0) + if !isDirectory { + parts := strings.Split(path, "/") + if len(parts) < 3 { // we can't have Git Folder in the / itself, so we can ignore all files/notebooks in the root + ic.gitInfoCacheMutex.Lock() + ic.gitInfoCache[path] = gitInfoCacheEntry{IsPresent: isPresent, RepoId: repoId} + ic.gitInfoCacheMutex.Unlock() + return isPresent, repoId + } + } + // Check if we have GitInfo for the path + notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client) + objStatus, err := notebooksAPI.GetStatus(path, true) + if err != nil { + log.Printf("[WARN] can't get GitInfo for %s: %v", path, err) + } else if 
objStatus.GitInfo != nil { + log.Printf("[DEBUG] GitInfo for %s: %v", path, objStatus.GitInfo) + isPresent = true + repoId = objStatus.GitInfo.Id + } else { + log.Printf("[DEBUG] No GitInfo for %s", path) + } + ic.gitInfoCacheMutex.Lock() + ic.gitInfoCache[path] = gitInfoCacheEntry{IsPresent: isPresent, RepoId: repoId} + ic.gitInfoCacheMutex.Unlock() + return isPresent, repoId +} + func (ic *importContext) emitWorkspaceObject(objType, path string) { - path = maybeStringWorkspacePrefix(path) + path = maybeStripWorkspacePrefix(path) if isRepoPath(path) { ic.emitRepoByPath(path) + } else if ok, repoId := ic.isInGitFolder(path, objType == "databricks_directory"); ok { + ic.Emit(&resource{ + Resource: "databricks_repo", + ID: strconv.FormatInt(repoId, 10), + }) } else { ic.maybeEmitWorkspaceObject(objType, path, nil) } diff --git a/workspace/resource_notebook.go b/workspace/resource_notebook.go index deed2caa7..991bcad47 100644 --- a/workspace/resource_notebook.go +++ b/workspace/resource_notebook.go @@ -20,6 +20,7 @@ const ( Notebook string = "NOTEBOOK" File string = "FILE" Directory string = "DIRECTORY" + Repo string = "REPO" Scala string = "SCALA" Python string = "PYTHON" SQL string = "SQL" @@ -58,6 +59,8 @@ type ObjectStatus struct { ModifiedAt int64 `json:"modified_at,omitempty"` ModifiedAtInteractive *ModifiedAtInteractive `json:"modified_at_interactive,omitempty"` Size int64 `json:"size,omitempty"` + GitInfo *workspace.RepoInfo `json:"git_info,omitempty"` + ResourceId string `json:"resource_id,omitempty"` } // ExportPath contains the base64 content of the notebook @@ -108,11 +111,20 @@ func (a NotebooksAPI) Create(r ImportPath) error { // Read returns the notebook metadata and not the contents func (a NotebooksAPI) Read(path string) (ObjectStatus, error) { + return a.GetStatus(path, false) +} + +// GetStatus returns the notebook metadata and not the contents +func (a NotebooksAPI) GetStatus(path string, returnGitInfo bool) (ObjectStatus, error) { var notebookInfo 
ObjectStatus + params := map[string]string{ + "path": path, + } + if returnGitInfo { + params["return_git_info"] = "true" + } _, err := common.RetryOnTimeout(a.context, func(ctx context.Context) (*ObjectStatus, error) { - err := a.client.Get(a.context, "/workspace/get-status", map[string]string{ - "path": path, - }, ¬ebookInfo) + err := a.client.Get(a.context, "/workspace/get-status", params, ¬ebookInfo) return nil, err }) return notebookInfo, err