Skip to content

Commit

Permalink
Adds databricks_volume as data source (#3211)
Browse files Browse the repository at this point in the history
* data_volume

* data_volume unit and acceptance tests

* docs

* WorkspaceDataWithCustomParams test

* fixed formatting

* Removing unnecessary changes to resource.go

* refactored data_volume

* making change for consistency with Go SDK v0.35.0

* Update catalog/data_volume.go

* Update catalog/data_volume.go

* data source as nested structure

* review comments addressed

* acceptance test

---------

Co-authored-by: Alex Ott <[email protected]>
Co-authored-by: vuong-nguyen <[email protected]>
  • Loading branch information
3 people authored Jul 3, 2024
1 parent c6f949c commit ff837ab
Show file tree
Hide file tree
Showing 6 changed files with 197 additions and 1 deletion.
25 changes: 25 additions & 0 deletions catalog/data_volume.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package catalog

import (
"context"

"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/terraform-provider-databricks/common"
)

// DataSourceVolume returns the databricks_volume workspace-level data source.
// It looks up a Unity Catalog volume by its fully qualified name
// (catalog.schema.volume) and exposes the API response as the computed
// "volume_info" nested attribute, using the volume's full name as the ID.
func DataSourceVolume() common.Resource {
	// Shape of the data source: "name" is the required input, everything
	// else is computed from the Volumes API response.
	type volumeData struct {
		Id     string              `json:"id,omitempty" tf:"computed"`
		Name   string              `json:"name"`
		Volume *catalog.VolumeInfo `json:"volume_info,omitempty" tf:"computed"`
	}
	return common.WorkspaceData(func(ctx context.Context, data *volumeData, w *databricks.WorkspaceClient) error {
		info, readErr := w.Volumes.ReadByName(ctx, data.Name)
		if readErr != nil {
			return readErr
		}
		data.Volume = info
		data.Id = info.FullName
		return nil
	})
}
50 changes: 50 additions & 0 deletions catalog/data_volume_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package catalog

import (
"testing"

"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/terraform-provider-databricks/qa"
"github.com/stretchr/testify/mock"
)

// TestDataSourceVolume verifies that reading the data source by a fully
// qualified name populates the nested volume_info attributes from the
// mocked Volumes.ReadByName response.
func TestDataSourceVolume(t *testing.T) {
	fixture := qa.ResourceFixture{
		MockWorkspaceClientFunc: func(m *mocks.MockWorkspaceClient) {
			m.GetMockVolumesAPI().EXPECT().
				ReadByName(mock.Anything, "a.b.c").
				Return(&catalog.VolumeInfo{
					FullName:    "a.b.c",
					CatalogName: "a",
					SchemaName:  "b",
					Name:        "c",
					Owner:       "account users",
					VolumeType:  catalog.VolumeTypeManaged,
				}, nil)
		},
		Resource: DataSourceVolume(),
		HCL: `
		name="a.b.c"`,
		Read:        true,
		NonWritable: true,
		ID:          "_",
	}
	fixture.ApplyAndExpectData(t, map[string]any{
		"name":                       "a.b.c",
		"volume_info.0.full_name":    "a.b.c",
		"volume_info.0.catalog_name": "a",
		"volume_info.0.schema_name":  "b",
		"volume_info.0.name":         "c",
		"volume_info.0.owner":        "account users",
		"volume_info.0.volume_type":  "MANAGED",
	})
}

// TestDataSourceVolume_Error checks that an HTTP failure from the API
// surfaces as an error from the data source read.
func TestDataSourceVolume_Error(t *testing.T) {
	fixture := qa.ResourceFixture{
		Fixtures:    qa.HTTPFailures,
		Resource:    DataSourceVolume(),
		Read:        true,
		NonWritable: true,
		ID:          "_",
	}
	fixture.ExpectError(t, "i'm a teapot")
}
2 changes: 1 addition & 1 deletion common/resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -365,7 +365,7 @@ func genericDatabricksData[T, P, C any](
hasOther bool) Resource {
var dummy T
var other P
otherFields := StructToSchema(other, NoCustomize)
otherFields := StructToSchema(other, nil)
s := StructToSchema(dummy, func(m map[string]*schema.Schema) map[string]*schema.Schema {
// For WorkspaceData and AccountData, a single data type is used to represent all of the fields of
// the resource, so its configuration is correct. For the *WithParams methods, the SdkType parameter
Expand Down
69 changes: 69 additions & 0 deletions docs/data-sources/volume.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
---
subcategory: "Unity Catalog"
---
# databricks_volume Data Source

Retrieves details about [databricks_volume](../resources/volume.md) that was created by Terraform or manually.
A volume can be identified by its three-level (fully qualified) name (in the form of: `catalog_name`.`schema_name`.`volume_name`) as input. This can be retrieved programmatically using [databricks_volumes](../data-sources/volumes.md) data source.

## Example Usage

* Retrieve details of all volumes in a _things_ [databricks_schema](../resources/schema.md) of a _sandbox_ [databricks_catalog](../resources/catalog.md):

```hcl
data "databricks_volumes" "all" {
catalog_name = "sandbox"
schema_name = "things"
}
data "databricks_volume" "this" {
for_each = data.databricks_volumes.all.ids
name = each.value
}
```

* Search for a specific volume by its fully qualified name

```hcl
data "databricks_volume" "this" {
name = "catalog.schema.volume"
}
```

## Argument Reference

* `name` - (Required) a fully qualified name of [databricks_volume](../resources/volume.md): *`catalog`.`schema`.`volume`*


## Attribute Reference

In addition to all arguments above, the following attributes are exported:

* `id` - ID of this Unity Catalog Volume in form of `<catalog>.<schema>.<name>`.
* `volume_info` - VolumeInfo object for a Unity Catalog volume. This contains the following attributes:
* `name` - Name of the volume, relative to parent schema.
* `access_point` - the AWS access point to use when accessing s3 bucket for this volume's external location
* `browse_only` - indicates whether the principal is limited to retrieving metadata for the volume through the BROWSE privilege when include_browse is enabled in the request.
* `catalog_name` - the name of the catalog where the schema and the volume are
* `comment` - the comment attached to the volume
* `created_at` - the Unix timestamp at the volume's creation
* `created_by` - the identifier of the user who created the volume
* `encryption_details` - encryption options that apply to clients connecting to cloud storage
* `full_name` - the three-level (fully qualified) name of the volume
* `metastore_id` - the unique identifier of the metastore
* `name` - the name of the volume
* `owner` - the identifier of the user who owns the volume
* `schema_name` - the name of the schema where the volume is
* `storage_location` - the storage location on the cloud
* `updated_at` - the timestamp of the last time changes were made to the volume
* `updated_by` - the identifier of the user who updated the volume last time
* `volume_id` - the unique identifier of the volume
* `volume_type` - whether the volume is `MANAGED` or `EXTERNAL`

## Related Resources

The following resources are used in the same context:

* [databricks_volume](../resources/volume.md) to manage volumes within Unity Catalog.
* [databricks_schema](../resources/schema.md) to manage schemas within Unity Catalog.
* [databricks_catalog](../resources/catalog.md) to manage catalogs within Unity Catalog.
51 changes: 51 additions & 0 deletions internal/acceptance/data_volume_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
package acceptance

import (
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
"github.com/stretchr/testify/require"
)

// checkDataSourceVolume returns a state-check callback that asserts the
// databricks_volume data source instance exists in the root module state.
func checkDataSourceVolume(t *testing.T) func(s *terraform.State) error {
	return func(s *terraform.State) error {
		_, found := s.Modules[0].Resources["data.databricks_volume.this"]
		require.True(t, found, "data.databricks_volume.this has to be there")
		return nil
	}
}
// TestUcAccDataSourceVolume provisions a catalog, a schema, and a managed
// volume, then reads the volume back through the databricks_volume data
// source and asserts it is present in the state.
func TestUcAccDataSourceVolume(t *testing.T) {
	const template = `
	resource "databricks_catalog" "sandbox" {
		name         = "sandbox{var.RANDOM}"
		comment      = "this catalog is managed by terraform"
		properties = {
			purpose = "testing"
		}
	}
	resource "databricks_schema" "things" {
		catalog_name = databricks_catalog.sandbox.id
		name         = "things{var.RANDOM}"
		comment      = "this database is managed by terraform"
		properties = {
			kind = "various"
		}
	}
	resource "databricks_volume" "this" {
		name         = "volume_data_source_test"
		catalog_name = databricks_catalog.sandbox.name
		schema_name  = databricks_schema.things.name
		volume_type  = "MANAGED"
	}
	data "databricks_volume" "this" {
		name = databricks_volume.this.id
		depends_on = [ databricks_volume.this ]
	}
	`
	unityWorkspaceLevel(t, step{
		Template: template,
		Check:    checkDataSourceVolume(t),
	})
}
1 change: 1 addition & 0 deletions provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ func DatabricksProvider() *schema.Provider {
"databricks_table": catalog.DataSourceTable().ToResource(),
"databricks_tables": catalog.DataSourceTables().ToResource(),
"databricks_views": catalog.DataSourceViews().ToResource(),
"databricks_volume": catalog.DataSourceVolume().ToResource(),
"databricks_volumes": catalog.DataSourceVolumes().ToResource(),
"databricks_user": scim.DataSourceUser().ToResource(),
"databricks_zones": clusters.DataSourceClusterZones().ToResource(),
Expand Down

0 comments on commit ff837ab

Please sign in to comment.