Skip to content

Commit

Permalink
fix: Fix a few smaller issues (#2507)
Browse files Browse the repository at this point in the history
- Removed incorrect validation on procedure resources that was causing
failures of the following tests:
  - `TestAcc_Procedure_Java`
  - `TestAcc_Procedure_Scala`
- Removed incorrect deprecation from table resource (#2488)
- Fixed ShowByID for views (#2506)
- Fixed notification_integration resource warnings for deprecated
parameters (#2501)
- Fixed external functions and procedures - added state migrations similar
to the ones for functions (#2490)

References: #2501 #2506 #2488 #2490
  • Loading branch information
sfc-gh-asawicki authored Feb 15, 2024
1 parent dfa52b2 commit a836871
Show file tree
Hide file tree
Showing 14 changed files with 459 additions and 9 deletions.
2 changes: 1 addition & 1 deletion docs/resources/table.md
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ resource "snowflake_table" "table" {
- `cluster_by` (List of String) A list of one or more table columns/expressions to be used as clustering key(s) for the table
- `comment` (String) Specifies a comment for the table.
- `data_retention_days` (Number, Deprecated) Specifies the retention period for the table so that Time Travel actions (SELECT, CLONE, UNDROP) can be performed on historical data in the table. Default value is 1, if you wish to inherit the parent schema setting then pass in the schema attribute to this argument.
- `data_retention_time_in_days` (Number, Deprecated) Specifies the retention period for the table so that Time Travel actions (SELECT, CLONE, UNDROP) can be performed on historical data in the table. Default value is 1, if you wish to inherit the parent schema setting then pass in the schema attribute to this argument.
- `data_retention_time_in_days` (Number) Specifies the retention period for the table so that Time Travel actions (SELECT, CLONE, UNDROP) can be performed on historical data in the table. Default value is 1, if you wish to inherit the parent schema setting then pass in the schema attribute to this argument.
- `primary_key` (Block List, Max: 1, Deprecated) Definitions of primary key constraint to create on table (see [below for nested schema](#nestedblock--primary_key))
- `tag` (Block List, Deprecated) Definitions of a tag to associate with the resource. (see [below for nested schema](#nestedblock--tag))

Expand Down
12 changes: 12 additions & 0 deletions pkg/resources/external_function.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"strings"

"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
"github.com/hashicorp/go-cty/cty"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
Expand Down Expand Up @@ -180,6 +181,8 @@ var externalFunctionSchema = map[string]*schema.Schema{
// ExternalFunction returns a pointer to the resource representing an external function.
func ExternalFunction() *schema.Resource {
return &schema.Resource{
SchemaVersion: 1,

CreateContext: CreateContextExternalFunction,
ReadContext: ReadContextExternalFunction,
UpdateContext: UpdateContextExternalFunction,
Expand All @@ -189,6 +192,15 @@ func ExternalFunction() *schema.Resource {
Importer: &schema.ResourceImporter{
StateContext: schema.ImportStatePassthroughContext,
},

StateUpgraders: []schema.StateUpgrader{
{
Version: 0,
// setting type to cty.EmptyObject is a bit hacky here but following https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject
Type: cty.EmptyObject,
Upgrade: v085ExternalFunctionStateUpgrader,
},
},
}
}

Expand Down
81 changes: 81 additions & 0 deletions pkg/resources/external_function_acceptance_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,12 @@ import (
"testing"

acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance"

"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
"github.com/hashicorp/terraform-plugin-testing/config"
"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/plancheck"
"github.com/hashicorp/terraform-plugin-testing/tfversion"
)

Expand Down Expand Up @@ -232,3 +235,81 @@ func TestAcc_ExternalFunction_complete(t *testing.T) {
},
})
}

// TestAcc_ExternalFunction_migrateFromVersion085 verifies the state upgrade
// from provider v0.85.0 to the current version: the legacy pipe-delimited id
// is replaced with a fully-qualified identifier, the quoted database/schema
// values are unwrapped, and the previously absent return_null_allowed
// attribute is backfilled — after which the plan is empty.
func TestAcc_ExternalFunction_migrateFromVersion085(t *testing.T) {
	functionName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))
	config := externalFunctionConfig(acc.TestDatabaseName, acc.TestSchemaName, functionName)

	const resourceName = "snowflake_external_function.f"
	// Id format written by v0.85.0: "db|schema|name|argType-argType".
	legacyId := fmt.Sprintf("%s|%s|%s|VARCHAR-VARCHAR", acc.TestDatabaseName, acc.TestSchemaName, functionName)
	upgradedId := sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, functionName, []sdk.DataType{sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR}).FullyQualifiedName()

	resource.Test(t, resource.TestCase{
		PreCheck: func() { acc.TestAccPreCheck(t) },
		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
			tfversion.RequireAbove(tfversion.Version1_5_0),
		},
		CheckDestroy: nil,

		Steps: []resource.TestStep{
			// Step 1: create with the old provider; state uses the legacy id
			// and quoted database/schema values.
			{
				ExternalProviders: map[string]resource.ExternalProvider{
					"snowflake": {
						VersionConstraint: "=0.85.0",
						Source:            "Snowflake-Labs/snowflake",
					},
				},
				Config: config,
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(resourceName, "id", legacyId),
					resource.TestCheckResourceAttr(resourceName, "name", functionName),
					resource.TestCheckResourceAttr(resourceName, "database", "\""+acc.TestDatabaseName+"\""),
					resource.TestCheckResourceAttr(resourceName, "schema", "\""+acc.TestSchemaName+"\""),
					resource.TestCheckNoResourceAttr(resourceName, "return_null_allowed"),
				),
				ExpectNonEmptyPlan: true,
			},
			// Step 2: same config under the current provider; the state
			// upgrader must migrate everything without producing a diff.
			{
				ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
				Config:                   config,
				ConfigPlanChecks: resource.ConfigPlanChecks{
					PreApply: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()},
				},
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(resourceName, "id", upgradedId),
					resource.TestCheckResourceAttr(resourceName, "name", functionName),
					resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName),
					resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName),
					resource.TestCheckResourceAttr(resourceName, "return_null_allowed", "true"),
				),
			},
		},
	})
}

func externalFunctionConfig(database string, schema string, name string) string {
return fmt.Sprintf(`
resource "snowflake_api_integration" "test_api_int" {
name = "%[3]s"
api_provider = "aws_api_gateway"
api_aws_role_arn = "arn:aws:iam::000000000001:/role/test"
api_allowed_prefixes = ["https://123456.execute-api.us-west-2.amazonaws.com/prod/"]
enabled = true
}
resource "snowflake_external_function" "f" {
name = "%[3]s"
database = "%[1]s"
schema = "%[2]s"
arg {
name = "ARG1"
type = "VARCHAR"
}
arg {
name = "ARG2"
type = "VARCHAR"
}
return_type = "VARIANT"
return_behavior = "IMMUTABLE"
api_integration = snowflake_api_integration.test_api_int.name
url_of_proxy_and_resource = "https://123456.execute-api.us-west-2.amazonaws.com/prod/test_func"
}
`, database, schema, name)
}
77 changes: 77 additions & 0 deletions pkg/resources/external_function_state_upgraders.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
package resources

import (
"context"
"encoding/csv"
"strings"

"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

// v085ExternalFunctionId holds the components of the legacy (pre-v0.86)
// external function resource id, which was stored as the pipe-delimited
// string "database|schema|name|argTypes".
type v085ExternalFunctionId struct {
DatabaseName string // database containing the function's schema
SchemaName string // schema containing the function
ExternalFunctionName string // name of the external function itself
ExternalFunctionArgTypes string // dash-separated argument data types (e.g. "VARCHAR-VARCHAR"); may be empty
}

// parseV085ExternalFunctionId parses the legacy pipe-delimited external
// function id ("database|schema|name|argTypes") used before provider v0.86.
// It returns an error when the input is not a single pipe-separated record
// of exactly four fields.
func parseV085ExternalFunctionId(stringID string) (*v085ExternalFunctionId, error) {
	idReader := csv.NewReader(strings.NewReader(stringID))
	idReader.Comma = '|'
	records, err := idReader.ReadAll()
	if err != nil {
		return nil, sdk.NewError("not CSV compatible")
	}

	switch {
	case len(records) != 1:
		return nil, sdk.NewError("1 line at a time")
	case len(records[0]) != 4:
		return nil, sdk.NewError("4 fields allowed")
	}

	fields := records[0]
	return &v085ExternalFunctionId{
		DatabaseName:             fields[0],
		SchemaName:               fields[1],
		ExternalFunctionName:     fields[2],
		ExternalFunctionArgTypes: fields[3],
	}, nil
}

// v085ExternalFunctionStateUpgrader migrates external function state written
// by provider versions <= 0.85 to the current format:
//   - the pipe-delimited id ("database|schema|name|argTypes") is replaced
//     with the SDK's fully-qualified identifier,
//   - database and schema values lose the surrounding double quotes the old
//     provider stored,
//   - return_null_allowed is backfilled with "true" when the old state did
//     not track it.
//
// A nil rawState is passed through unchanged.
func v085ExternalFunctionStateUpgrader(ctx context.Context, rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) {
if rawState == nil {
return rawState, nil
}

// NOTE(review): the assertions below assume the legacy state always stores
// these attributes as strings; a non-string value would panic — confirm
// against the v0.85 schema.
oldId := rawState["id"].(string)
parsedV085ExternalFunctionId, err := parseV085ExternalFunctionId(oldId)
if err != nil {
return nil, err
}

// Rebuild the typed argument list from the dash-separated segment of the
// old id; an empty segment means a zero-argument function.
argDataTypes := make([]sdk.DataType, 0)
if parsedV085ExternalFunctionId.ExternalFunctionArgTypes != "" {
for _, argType := range strings.Split(parsedV085ExternalFunctionId.ExternalFunctionArgTypes, "-") {
argDataType, err := sdk.ToDataType(argType)
if err != nil {
return nil, err
}
argDataTypes = append(argDataTypes, argDataType)
}
}

// Swap the legacy id for the SDK's fully-qualified identifier.
schemaObjectIdentifierWithArguments := sdk.NewSchemaObjectIdentifierWithArguments(parsedV085ExternalFunctionId.DatabaseName, parsedV085ExternalFunctionId.SchemaName, parsedV085ExternalFunctionId.ExternalFunctionName, argDataTypes)
rawState["id"] = schemaObjectIdentifierWithArguments.FullyQualifiedName()

// The old provider stored database and schema wrapped in double quotes;
// strip them so the values match the current resource representation.
oldDatabase := rawState["database"].(string)
oldSchema := rawState["schema"].(string)

rawState["database"] = strings.Trim(oldDatabase, "\"")
rawState["schema"] = strings.Trim(oldSchema, "\"")

// Backfill the default for return_null_allowed when the attribute is
// missing, nil, or empty in the legacy state.
if old, isPresent := rawState["return_null_allowed"]; !isPresent || old == nil || old.(string) == "" {
rawState["return_null_allowed"] = "true"
}

return rawState, nil
}
2 changes: 2 additions & 0 deletions pkg/resources/function_acceptance_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,7 @@ func TestAcc_Function_migrateFromVersion085(t *testing.T) {
},
Config: functionConfig(acc.TestDatabaseName, acc.TestSchemaName, name),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|%s|%s|VARCHAR", acc.TestDatabaseName, acc.TestSchemaName, name)),
resource.TestCheckResourceAttr(resourceName, "name", name),
resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName),
resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName),
Expand All @@ -218,6 +219,7 @@ func TestAcc_Function_migrateFromVersion085(t *testing.T) {
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
Config: functionConfig(acc.TestDatabaseName, acc.TestSchemaName, name),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "id", sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, name, []sdk.DataType{sdk.DataTypeVARCHAR}).FullyQualifiedName()),
resource.TestCheckResourceAttr(resourceName, "name", name),
resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName),
resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName),
Expand Down
10 changes: 8 additions & 2 deletions pkg/resources/function_state_upgraders.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,20 @@ type v085FunctionId struct {
func parseV085FunctionId(v string) (*v085FunctionId, error) {
arr := strings.Split(v, "|")
if len(arr) != 4 {
return nil, fmt.Errorf("ID %v is invalid", v)
return nil, sdk.NewError(fmt.Sprintf("ID %v is invalid", v))
}

// this is a bit different from V085 state, but it was buggy
var args []string
if arr[3] != "" {
args = strings.Split(arr[3], "-")
}

return &v085FunctionId{
DatabaseName: arr[0],
SchemaName: arr[1],
FunctionName: arr[2],
ArgTypes: strings.Split(arr[3], "-"),
ArgTypes: args,
}, nil
}

Expand Down
4 changes: 3 additions & 1 deletion pkg/resources/notification_integration.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,10 @@ var notificationIntegrationSchema = map[string]*schema.Schema{
Optional: true,
ValidateFunc: validation.StringInSlice([]string{"INBOUND", "OUTBOUND"}, true),
Description: "Direction of the cloud messaging with respect to Snowflake (required only for error notifications)",
ForceNew: true,
Deprecated: "Will be removed - it is added automatically on the SDK level.",
DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
return true
},
},
// This part of the schema is the cloudProviderParams in the Snowflake documentation and differs between vendors
"notification_provider": {
Expand Down
110 changes: 110 additions & 0 deletions pkg/resources/notification_integration_acceptance_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import (
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/plancheck"
"github.com/hashicorp/terraform-plugin-testing/terraform"
"github.com/hashicorp/terraform-plugin-testing/tfversion"
)
Expand Down Expand Up @@ -220,6 +221,91 @@ func TestAcc_NotificationIntegration_PushAzure(t *testing.T) {
t.Skip("Skipping because can't be currently created. Check 'create and describe notification integration - push azure' test in the SDK.")
}

// proves issue https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2501
// TestAcc_NotificationIntegration_migrateFromVersion085 creates a GCP_PUBSUB
// notification integration with provider v0.85.0 and then re-applies a config
// without the deprecated direction attribute under the current provider,
// expecting an empty plan and unchanged attributes.
func TestAcc_NotificationIntegration_migrateFromVersion085(t *testing.T) {
	integrationName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))

	const resourceName = "snowflake_notification_integration.test"
	const gcpPubsubSubscriptionName = "projects/project-1234/subscriptions/sub2"

	// Both steps expect identical attribute values: direction keeps reporting
	// INBOUND even after the config stops setting it.
	checks := resource.ComposeTestCheckFunc(
		resource.TestCheckResourceAttr(resourceName, "enabled", "true"),
		resource.TestCheckResourceAttr(resourceName, "name", integrationName),
		resource.TestCheckResourceAttr(resourceName, "direction", "INBOUND"),
	)

	resource.Test(t, resource.TestCase{
		PreCheck: func() { acc.TestAccPreCheck(t) },
		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
			tfversion.RequireAbove(tfversion.Version1_5_0),
		},
		CheckDestroy: testAccCheckNotificationIntegrationDestroy,
		Steps: []resource.TestStep{
			{
				ExternalProviders: map[string]resource.ExternalProvider{
					"snowflake": {
						VersionConstraint: "=0.85.0",
						Source:            "Snowflake-Labs/snowflake",
					},
				},
				Config: googleAutoConfig(integrationName, gcpPubsubSubscriptionName),
				Check:  checks,
			},
			{
				ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
				Config:                   googleAutoConfigWithoutDirection(integrationName, gcpPubsubSubscriptionName),
				ConfigPlanChecks: resource.ConfigPlanChecks{
					PreApply: []plancheck.PlanCheck{
						plancheck.ExpectEmptyPlan(),
					},
				},
				Check: checks,
			},
		},
	})
}

// TestAcc_NotificationIntegration_migrateFromVersion085_explicitType covers
// the migration path where the v0.85.0 config explicitly set type and
// direction; re-applying a config without them under the current provider
// must yield an empty plan.
func TestAcc_NotificationIntegration_migrateFromVersion085_explicitType(t *testing.T) {
	integrationName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))

	const resourceName = "snowflake_notification_integration.test"
	const gcpPubsubSubscriptionName = "projects/project-1234/subscriptions/sub2"

	nameCheck := resource.ComposeTestCheckFunc(
		resource.TestCheckResourceAttr(resourceName, "name", integrationName),
	)

	resource.Test(t, resource.TestCase{
		PreCheck: func() { acc.TestAccPreCheck(t) },
		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
			tfversion.RequireAbove(tfversion.Version1_5_0),
		},
		CheckDestroy: testAccCheckNotificationIntegrationDestroy,
		Steps: []resource.TestStep{
			{
				ExternalProviders: map[string]resource.ExternalProvider{
					"snowflake": {
						VersionConstraint: "=0.85.0",
						Source:            "Snowflake-Labs/snowflake",
					},
				},
				Config: googleAutoConfigWithExplicitType(integrationName, gcpPubsubSubscriptionName),
				Check:  nameCheck,
			},
			{
				ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
				Config:                   googleAutoConfig(integrationName, gcpPubsubSubscriptionName),
				ConfigPlanChecks: resource.ConfigPlanChecks{
					PreApply: []plancheck.PlanCheck{
						plancheck.ExpectEmptyPlan(),
					},
				},
				Check: nameCheck,
			},
		},
	})
}

func googleAutoConfig(name string, gcpPubsubSubscriptionName string) string {
s := `
resource "snowflake_notification_integration" "test" {
Expand All @@ -232,6 +318,30 @@ resource "snowflake_notification_integration" "test" {
return fmt.Sprintf(s, name, "GCP_PUBSUB", gcpPubsubSubscriptionName)
}

// googleAutoConfigWithoutDirection renders a notification integration
// configuration for the GCP_PUBSUB provider that deliberately omits the
// deprecated direction attribute.
func googleAutoConfigWithoutDirection(name string, gcpPubsubSubscriptionName string) string {
	return fmt.Sprintf(`
resource "snowflake_notification_integration" "test" {
name = "%s"
notification_provider = "%s"
gcp_pubsub_subscription_name = "%s"
}
`, name, "GCP_PUBSUB", gcpPubsubSubscriptionName)
}

// googleAutoConfigWithExplicitType renders a notification integration
// configuration for the GCP_PUBSUB provider with both the type ("QUEUE")
// and the deprecated direction ("INBOUND") attributes set explicitly.
func googleAutoConfigWithExplicitType(name string, gcpPubsubSubscriptionName string) string {
	const configTemplate = `
resource "snowflake_notification_integration" "test" {
type = "QUEUE"
name = "%s"
notification_provider = "%s"
gcp_pubsub_subscription_name = "%s"
direction = "INBOUND"
}
`
	const notificationProvider = "GCP_PUBSUB"
	return fmt.Sprintf(configTemplate, name, notificationProvider, gcpPubsubSubscriptionName)
}

func azureAutoConfig(name string, azureStorageQueuePrimaryUri string, azureTenantId string) string {
s := `
resource "snowflake_notification_integration" "test" {
Expand Down
Loading

0 comments on commit a836871

Please sign in to comment.