Skip to content

Commit

Permalink
[Internal] Refactored databricks_zones and `databricks_spark_versio…
Browse files Browse the repository at this point in the history
…ns` data sources to Go SDK (#3687)

## Changes
- `LatestSparkVersionOrDefault` now returns 11.3 LTS, as 7.3 LTS is
deprecated
- Refactored `databricks_zones` to Go SDK
- Refactored `databricks_spark_versions` to Go SDK. This refactoring
requires one additional change to `resource.go`:
- Add new method `WorkspaceDataWithCustomizeFunc` to allow customization
of the data source schema
- Removed Spark versions related methods, as these have now moved to Go
SDK. This requires migrating the function `LatestSparkVersionOrDefault`
to a Go SDK method, which requires changing existing structs in
Terraform provider to equivalent in Go SDK (`clusters.SparkVersionsList`
to `compute.GetSparkVersionsResponse`, etc.)

## Tests
<!-- 
How is this tested? Please see the checklist below and also describe any
other relevant tests
-->

- [x] `make test` run locally
- [x] covered with integration tests in `internal/acceptance`
- [x] relevant acceptance tests are passing
- [x] using Go SDK
  • Loading branch information
nkvuong authored Jul 23, 2024
1 parent b138c0b commit 34bac74
Show file tree
Hide file tree
Showing 18 changed files with 264 additions and 454 deletions.
2 changes: 1 addition & 1 deletion access/resource_sql_permissions.go
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,7 @@ func (ta *SqlPermissions) initCluster(ctx context.Context, d *schema.ResourceDat
}

func (ta *SqlPermissions) getOrCreateCluster(clustersAPI clusters.ClustersAPI) (string, error) {
sparkVersion := clustersAPI.LatestSparkVersionOrDefault(clusters.SparkVersionRequest{
sparkVersion := clusters.LatestSparkVersionOrDefault(clustersAPI.Context(), clustersAPI.WorkspaceClient(), compute.SparkVersionRequest{
Latest: true,
})
nodeType := clustersAPI.GetSmallestNodeType(compute.NodeTypeRequest{LocalDisk: true})
Expand Down
20 changes: 10 additions & 10 deletions access/resource_sql_permissions_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -185,11 +185,11 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
Method: "GET",
ReuseRequest: true,
Resource: "/api/2.0/clusters/spark-versions",
Response: clusters.SparkVersionsList{
SparkVersions: []clusters.SparkVersion{
Response: compute.GetSparkVersionsResponse{
Versions: []compute.SparkVersion{
{
Version: "7.1.x-cpu-ml-scala2.12",
Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
Key: "7.1.x-cpu-ml-scala2.12",
Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
},
},
},
Expand Down Expand Up @@ -222,7 +222,7 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
AutoterminationMinutes: 10,
ClusterName: "terraform-table-acl",
NodeTypeID: "Standard_F4s",
SparkVersion: "7.3.x-scala2.12",
SparkVersion: "11.3.x-scala2.12",
CustomTags: map[string]string{
"ResourceClass": "SingleNode",
},
Expand Down Expand Up @@ -262,11 +262,11 @@ var createSharedCluster = []qa.HTTPFixture{
Method: "GET",
ReuseRequest: true,
Resource: "/api/2.0/clusters/spark-versions",
Response: clusters.SparkVersionsList{
SparkVersions: []clusters.SparkVersion{
Response: compute.GetSparkVersionsResponse{
Versions: []compute.SparkVersion{
{
Version: "7.1.x-cpu-ml-scala2.12",
Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
Key: "7.1.x-cpu-ml-scala2.12",
Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
},
},
},
Expand Down Expand Up @@ -299,7 +299,7 @@ var createSharedCluster = []qa.HTTPFixture{
AutoterminationMinutes: 10,
ClusterName: "terraform-table-acl",
NodeTypeID: "Standard_F4s",
SparkVersion: "7.3.x-scala2.12",
SparkVersion: "11.3.x-scala2.12",
CustomTags: map[string]string{
"ResourceClass": "SingleNode",
},
Expand Down
2 changes: 1 addition & 1 deletion catalog/resource_sql_table.go
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ func (ti *SqlTableInfo) initCluster(ctx context.Context, d *schema.ResourceData,
}

func (ti *SqlTableInfo) getOrCreateCluster(clusterName string, clustersAPI clusters.ClustersAPI) (string, error) {
sparkVersion := clustersAPI.LatestSparkVersionOrDefault(clusters.SparkVersionRequest{
sparkVersion := clusters.LatestSparkVersionOrDefault(clustersAPI.Context(), clustersAPI.WorkspaceClient(), compute.SparkVersionRequest{
Latest: true,
})
nodeType := clustersAPI.GetSmallestNodeType(compute.NodeTypeRequest{LocalDisk: true})
Expand Down
12 changes: 6 additions & 6 deletions catalog/resource_sql_table_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1248,15 +1248,15 @@ var baseClusterFixture = []qa.HTTPFixture{
Method: "GET",
ReuseRequest: true,
Resource: "/api/2.0/clusters/spark-versions",
Response: clusters.SparkVersionsList{
SparkVersions: []clusters.SparkVersion{
Response: compute.GetSparkVersionsResponse{
Versions: []compute.SparkVersion{
{
Version: "7.1.x-cpu-ml-scala2.12",
Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
Key: "7.1.x-cpu-ml-scala2.12",
Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
},
{
Version: "7.3.x-scala2.12",
Description: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
Key: "7.3.x-scala2.12",
Name: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
},
},
},
Expand Down
16 changes: 15 additions & 1 deletion clusters/clusters_api.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"sync"
"time"

"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/compute"

Expand Down Expand Up @@ -574,6 +575,19 @@ type ClustersAPI struct {
context context.Context
}

// WorkspaceClient returns a Go SDK workspace client derived from the legacy
// DatabricksClient held by this ClustersAPI.
// Temporary function to be used until all resources are migrated to Go SDK.
// NOTE(review): the error from a.client.WorkspaceClient() is discarded, so a
// misconfigured client yields a nil *databricks.WorkspaceClient here and the
// caller panics on first use — confirm this is acceptable for the migration
// period, or propagate the error.
func (a ClustersAPI) WorkspaceClient() *databricks.WorkspaceClient {
client, _ := a.client.WorkspaceClient()
return client
}

// Context returns the context.Context this ClustersAPI was created with.
// Temporary function to be used until all resources are migrated to Go SDK.
func (a ClustersAPI) Context() context.Context {
return a.context
}

// Create creates a new Spark cluster and waits till it's running
func (a ClustersAPI) Create(cluster Cluster) (info ClusterInfo, err error) {
var ci ClusterID
Expand Down Expand Up @@ -903,7 +917,7 @@ func (a ClustersAPI) GetOrCreateRunningCluster(name string, custom ...Cluster) (
r := Cluster{
NumWorkers: 1,
ClusterName: name,
SparkVersion: a.LatestSparkVersionOrDefault(SparkVersionRequest{
SparkVersion: LatestSparkVersionOrDefault(a.Context(), a.WorkspaceClient(), compute.SparkVersionRequest{
Latest: true,
LongTermSupport: true,
}),
Expand Down
9 changes: 9 additions & 0 deletions clusters/clusters_api_sdk.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,12 @@ func StartClusterAndGetInfo(ctx context.Context, w *databricks.WorkspaceClient,
}
return w.Clusters.StartByClusterIdAndWait(ctx, clusterID)
}

// LatestSparkVersionOrDefault returns the Spark version matching the given
// request definition, falling back to a fixed LTS version when the lookup
// fails for any reason (best-effort: the error is deliberately swallowed).
func LatestSparkVersionOrDefault(ctx context.Context, w *databricks.WorkspaceClient, svr compute.SparkVersionRequest) string {
	const fallback = "11.3.x-scala2.12" // 11.3 LTS
	if v, err := w.Clusters.SelectSparkVersion(ctx, svr); err == nil {
		return v
	}
	return fallback
}
135 changes: 11 additions & 124 deletions clusters/clusters_api_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import (
"fmt"

// "reflect"
"strings"

"testing"

"github.com/databricks/databricks-sdk-go/apierr"
Expand All @@ -28,23 +28,23 @@ func TestGetOrCreateRunningCluster_AzureAuth(t *testing.T) {
Method: "GET",
ReuseRequest: true,
Resource: "/api/2.0/clusters/spark-versions",
Response: SparkVersionsList{
SparkVersions: []SparkVersion{
Response: compute.GetSparkVersionsResponse{
Versions: []compute.SparkVersion{
{
Version: "7.1.x-cpu-ml-scala2.12",
Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
Key: "7.1.x-cpu-ml-scala2.12",
Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
},
{
Version: "apache-spark-2.4.x-scala2.11",
Description: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)",
Key: "apache-spark-2.4.x-scala2.11",
Name: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)",
},
{
Version: "7.3.x-scala2.12",
Description: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
Key: "7.3.x-scala2.12",
Name: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
},
{
Version: "6.4.x-scala2.11",
Description: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)",
Key: "6.4.x-scala2.11",
Name: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)",
},
},
},
Expand Down Expand Up @@ -1016,119 +1016,6 @@ func TestEventsEmptyResult(t *testing.T) {
assert.Equal(t, len(clusterEvents), 0)
}

// TestListSparkVersions verifies that ListSparkVersions decodes the
// /clusters/spark-versions payload and preserves the order of entries.
func TestListSparkVersions(t *testing.T) {
	fixtures := []qa.HTTPFixture{
		{
			Method:   "GET",
			Resource: "/api/2.0/clusters/spark-versions",
			Response: SparkVersionsList{
				SparkVersions: []SparkVersion{
					{Version: "7.1.x-cpu-ml-scala2.12", Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)"},
					{Version: "apache-spark-2.4.x-scala2.11", Description: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)"},
					{Version: "7.3.x-hls-scala2.12", Description: "7.3 LTS Genomics (includes Apache Spark 3.0.1, Scala 2.12)"},
					{Version: "6.4.x-scala2.11", Description: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)"},
				},
			},
		},
	}
	client, server, err := qa.HttpFixtureClient(t, fixtures)
	defer server.Close()
	require.NoError(t, err)

	versions, err := NewClustersAPI(context.Background(), client).ListSparkVersions()
	require.NoError(t, err)
	require.Len(t, versions.SparkVersions, 4)
	require.Equal(t, "6.4.x-scala2.11", versions.SparkVersions[3].Version)
}

// TestListSparkVersionsWithError ensures a malformed JSON response from the
// spark-versions endpoint surfaces as a decode error to the caller.
func TestListSparkVersionsWithError(t *testing.T) {
	client, server, err := qa.HttpFixtureClient(t, []qa.HTTPFixture{
		{
			Method:   "GET",
			Resource: "/api/2.0/clusters/spark-versions",
			Response: "{garbage....",
		},
	})
	defer server.Close()
	require.NoError(t, err)

	_, err = NewClustersAPI(context.Background(), client).ListSparkVersions()
	require.Error(t, err)
	require.Contains(t, err.Error(), "invalid character 'g' looking")
}

// TestGetLatestSparkVersion exercises LatestSparkVersion selection logic:
// resolving a request to the single newest matching version, and erroring
// when a request matches zero or multiple versions.
func TestGetLatestSparkVersion(t *testing.T) {
	versions := SparkVersionsList{
		SparkVersions: []SparkVersion{
			{Version: "7.1.x-cpu-ml-scala2.12", Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)"},
			{Version: "apache-spark-2.4.x-scala2.11", Description: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)"},
			{Version: "7.3.x-hls-scala2.12", Description: "7.3 LTS Genomics (includes Apache Spark 3.0.1, Scala 2.12)"},
			{Version: "6.4.x-scala2.11", Description: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)"},
			{Version: "7.3.x-scala2.12", Description: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)"},
			{Version: "7.4.x-scala2.12", Description: "7.4 (includes Apache Spark 3.0.1, Scala 2.12)"},
			{Version: "7.1.x-scala2.12", Description: "7.1 (includes Apache Spark 3.0.0, Scala 2.12)"},
		},
	}

	// Requests that must resolve to exactly one version.
	for _, tc := range []struct {
		req  SparkVersionRequest
		want string
	}{
		{SparkVersionRequest{Scala: "2.12", Latest: true}, "7.4.x-scala2.12"},
		{SparkVersionRequest{Scala: "2.12", LongTermSupport: true, Latest: true}, "7.3.x-scala2.12"},
		{SparkVersionRequest{Scala: "2.12", Latest: true, SparkVersion: "3.0.0"}, "7.1.x-scala2.12"},
	} {
		got, err := versions.LatestSparkVersion(tc.req)
		require.NoError(t, err)
		require.Equal(t, tc.want, got)
	}

	// Requests that must fail, and the substring expected in the error.
	for _, tc := range []struct {
		req     SparkVersionRequest
		wantErr string
	}{
		{SparkVersionRequest{Scala: "2.12"}, "query returned multiple results"},
		{SparkVersionRequest{Scala: "2.12", ML: true, Genomics: true}, "query returned no results"},
		{SparkVersionRequest{Scala: "2.12", SparkVersion: "3.10"}, "query returned no results"},
	} {
		_, err := versions.LatestSparkVersion(tc.req)
		require.Error(t, err)
		require.Contains(t, err.Error(), tc.wantErr)
	}
}

func TestClusterState_CanReach(t *testing.T) {
tests := []struct {
from ClusterState
Expand Down
Loading

0 comments on commit 34bac74

Please sign in to comment.