diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md
index 24f182f215..7b15f31f10 100644
--- a/MIGRATION_GUIDE.md
+++ b/MIGRATION_GUIDE.md
@@ -13,6 +13,27 @@ While solving issue [#2733](https://github.com/Snowflake-Labs/terraform-provider
#### *(behavior change)* Validation to arguments type added
Diff suppression for `arguments.type` is needed for the same reason as above for the `snowflake_table` resource.
+### tag_masking_policy_association resource changes
+The `tag_masking_policy_association` resource now accepts only fully qualified names with parts separated by dots (`.`) instead of pipes (`|`).
+
+Before
+```terraform
+resource "snowflake_tag_masking_policy_association" "name" {
+ tag_id = snowflake_tag.this.id
+ masking_policy_id = snowflake_masking_policy.example_masking_policy.id
+}
+```
+
+After
+```terraform
+resource "snowflake_tag_masking_policy_association" "name" {
+ tag_id = "\"${snowflake_tag.this.database}\".\"${snowflake_tag.this.schema}\".\"${snowflake_tag.this.name}\""
+ masking_policy_id = "\"${snowflake_masking_policy.example_masking_policy.database}\".\"${snowflake_masking_policy.example_masking_policy.schema}\".\"${snowflake_masking_policy.example_masking_policy.name}\""
+}
+```
+
+This form is more verbose, but after the identifier rework the configuration should return to something close to the previous one.
+
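+If the inline interpolation feels verbose, the qualified names can also be built once in `locals` and reused; a minimal sketch (the local value names are arbitrary), using the resources from the example above:
+
+```terraform
+locals {
+ tag_fqn = "\"${snowflake_tag.this.database}\".\"${snowflake_tag.this.schema}\".\"${snowflake_tag.this.name}\""
+ masking_policy_fqn = "\"${snowflake_masking_policy.example_masking_policy.database}\".\"${snowflake_masking_policy.example_masking_policy.schema}\".\"${snowflake_masking_policy.example_masking_policy.name}\""
+}
+
+resource "snowflake_tag_masking_policy_association" "name" {
+ tag_id = local.tag_fqn
+ masking_policy_id = local.masking_policy_fqn
+}
+```
+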
## v0.88.0 ➞ v0.89.0
#### *(behavior change)* ForceNew removed
The `ForceNew` field was removed in favor of in-place Update for `name` parameter in:
diff --git a/docs/resources/stream.md b/docs/resources/stream.md
index a10806fd09..5d86bb1f34 100644
--- a/docs/resources/stream.md
+++ b/docs/resources/stream.md
@@ -12,6 +12,17 @@ description: |-
## Example Usage
```terraform
+resource "snowflake_table" "table" {
+ database = "database"
+ schema = "schema"
+ name = "name"
+
+ column {
+ type = "NUMBER(38,0)"
+ name = "id"
+ }
+}
+
resource "snowflake_stream" "stream" {
comment = "A stream."
@@ -19,7 +30,7 @@ resource "snowflake_stream" "stream" {
schema = "schema"
name = "stream"
- on_table = "table"
+ on_table = snowflake_table.table.qualified_name
append_only = false
insert_only = false
diff --git a/docs/resources/table_constraint.md b/docs/resources/table_constraint.md
index 9c891cbece..1fa083563c 100644
--- a/docs/resources/table_constraint.md
+++ b/docs/resources/table_constraint.md
@@ -125,12 +125,15 @@ resource "snowflake_table_constraint" "unique" {
### Nested Schema for `foreign_key_properties`
+Required:
+
+- `references` (Block List, Min: 1, Max: 1) The table and columns that the foreign key references. (see [below for nested schema](#nestedblock--foreign_key_properties--references))
+
Optional:
- `match` (String) The match type for the foreign key. Not applicable for primary/unique keys
- `on_delete` (String) Specifies the action performed when the primary/unique key for the foreign key is deleted. Not applicable for primary/unique keys
- `on_update` (String) Specifies the action performed when the primary/unique key for the foreign key is updated. Not applicable for primary/unique keys
-- `references` (Block List, Max: 1) The table and columns that the foreign key references. Not applicable for primary/unique keys (see [below for nested schema](#nestedblock--foreign_key_properties--references))
### Nested Schema for `foreign_key_properties.references`
diff --git a/docs/resources/tag_masking_policy_association.md b/docs/resources/tag_masking_policy_association.md
index c03deaac66..86468abf58 100644
--- a/docs/resources/tag_masking_policy_association.md
+++ b/docs/resources/tag_masking_policy_association.md
@@ -53,8 +53,8 @@ resource "snowflake_masking_policy" "example_masking_policy" {
}
resource "snowflake_tag_masking_policy_association" "name" {
- tag_id = snowflake_tag.this.id
- masking_policy_id = snowflake_masking_policy.example_masking_policy.id
+ tag_id = "\"${snowflake_tag.this.database}\".\"${snowflake_tag.this.schema}\".\"${snowflake_tag.this.name}\""
+ masking_policy_id = "\"${snowflake_masking_policy.example_masking_policy.database}\".\"${snowflake_masking_policy.example_masking_policy.schema}\".\"${snowflake_masking_policy.example_masking_policy.name}\""
}
```
diff --git a/examples/resources/snowflake_stream/resource.tf b/examples/resources/snowflake_stream/resource.tf
index 883268f94c..231fdb6341 100644
--- a/examples/resources/snowflake_stream/resource.tf
+++ b/examples/resources/snowflake_stream/resource.tf
@@ -1,3 +1,14 @@
+resource "snowflake_table" "table" {
+ database = "database"
+ schema = "schema"
+ name = "name"
+
+ column {
+ type = "NUMBER(38,0)"
+ name = "id"
+ }
+}
+
resource "snowflake_stream" "stream" {
comment = "A stream."
@@ -5,7 +16,7 @@ resource "snowflake_stream" "stream" {
schema = "schema"
name = "stream"
- on_table = "table"
+ on_table = snowflake_table.table.qualified_name
append_only = false
insert_only = false
diff --git a/examples/resources/snowflake_tag_masking_policy_association/resource.tf b/examples/resources/snowflake_tag_masking_policy_association/resource.tf
index 993100423e..4a01c1bce4 100644
--- a/examples/resources/snowflake_tag_masking_policy_association/resource.tf
+++ b/examples/resources/snowflake_tag_masking_policy_association/resource.tf
@@ -39,6 +39,6 @@ resource "snowflake_masking_policy" "example_masking_policy" {
}
resource "snowflake_tag_masking_policy_association" "name" {
- tag_id = snowflake_tag.this.id
- masking_policy_id = snowflake_masking_policy.example_masking_policy.id
+ tag_id = "\"${snowflake_tag.this.database}\".\"${snowflake_tag.this.schema}\".\"${snowflake_tag.this.name}\""
+ masking_policy_id = "\"${snowflake_masking_policy.example_masking_policy.database}\".\"${snowflake_masking_policy.example_masking_policy.schema}\".\"${snowflake_masking_policy.example_masking_policy.name}\""
}
\ No newline at end of file
diff --git a/pkg/resources/account_password_policy_attachment.go b/pkg/resources/account_password_policy_attachment.go
index f06718bbdd..f362ff1629 100644
--- a/pkg/resources/account_password_policy_attachment.go
+++ b/pkg/resources/account_password_policy_attachment.go
@@ -13,10 +13,11 @@ import (
var accountPasswordPolicyAttachmentSchema = map[string]*schema.Schema{
"password_policy": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- Description: "Qualified name (`\"db\".\"schema\".\"policy_name\"`) of the password policy to apply to the current account.",
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ Description: "Qualified name (`\"db\".\"schema\".\"policy_name\"`) of the password policy to apply to the current account.",
+ ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
},
}
@@ -45,7 +46,6 @@ func CreateAccountPasswordPolicyAttachment(d *schema.ResourceData, meta interfac
if !ok {
return fmt.Errorf("password_policy %s is not a valid password policy qualified name, expected format: `\"db\".\"schema\".\"policy\"`", d.Get("password_policy"))
}
- // passwordPolicy := sdk.NewAccountObjectIdentifier(d.Get("password_policy").(string))
err := client.Accounts.Alter(ctx, &sdk.AlterAccountOptions{
Set: &sdk.AccountSet{
diff --git a/pkg/resources/external_oauth_integration.go b/pkg/resources/external_oauth_integration.go
index 4bb09989ae..0cea6460c0 100644
--- a/pkg/resources/external_oauth_integration.go
+++ b/pkg/resources/external_oauth_integration.go
@@ -402,7 +402,7 @@ func UpdateExternalOauthIntegration(d *schema.ResourceData, meta interface{}) er
if d.HasChange("snowflake_user_mapping_attribute") {
val, ok := d.GetOk("snowflake_user_mapping_attribute")
if ok {
- alterInput.ExternalOauthSnowflakeUserMappingAttribute = val.(snowflake.SFUserMappingAttribute)
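+ // val holds a plain string from the schema; convert it explicitly, since a direct type assertion to the named type would panic.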
+ alterInput.ExternalOauthSnowflakeUserMappingAttribute = snowflake.SFUserMappingAttribute(val.(string))
alterInput.ExternalOauthSnowflakeUserMappingAttributeOk = true
runAlter = true
} else {
@@ -479,7 +479,7 @@ func UpdateExternalOauthIntegration(d *schema.ResourceData, meta interface{}) er
if d.HasChange("any_role_mode") {
val, ok := d.GetOk("any_role_mode")
if ok {
- alterInput.ExternalOauthAnyRoleMode = val.(snowflake.AnyRoleMode)
+ alterInput.ExternalOauthAnyRoleMode = snowflake.AnyRoleMode(val.(string))
alterInput.ExternalOauthAnyRoleModeOk = true
runAlter = true
} else {
diff --git a/pkg/resources/failover_group.go b/pkg/resources/failover_group.go
index eb2eb76bda..a3bc18c2cb 100644
--- a/pkg/resources/failover_group.go
+++ b/pkg/resources/failover_group.go
@@ -438,32 +438,42 @@ func UpdateFailoverGroup(d *schema.ResourceData, meta interface{}) error {
}
if d.HasChange("replication_schedule") {
- _, n := d.GetChange("replication_schedule")
- replicationSchedule := n.([]interface{})[0].(map[string]interface{})
- c := replicationSchedule["cron"].([]interface{})
- var updatedReplicationSchedule string
- if len(c) > 0 {
- cron := c[0].(map[string]interface{})
- cronExpression := cron["expression"].(string)
- cronExpression = "USING CRON " + cronExpression
- if v, ok := cron["time_zone"]; ok {
- timeZone := v.(string)
- if timeZone != "" {
- cronExpression = cronExpression + " " + timeZone
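+ // Read the replication_schedule blocks from the configuration: a non-empty list sets a new schedule, an empty one unsets it.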
+ replicationSchedules := d.Get("replication_schedule").([]any)
+ if len(replicationSchedules) > 0 {
+ replicationSchedule := replicationSchedules[0].(map[string]any)
+ crons := replicationSchedule["cron"].([]any)
+ var updatedReplicationSchedule string
+ if len(crons) > 0 {
+ cron := crons[0].(map[string]any)
+ cronExpression := cron["expression"].(string)
+ cronExpression = "USING CRON " + cronExpression
+ if v, ok := cron["time_zone"]; ok {
+ timeZone := v.(string)
+ if timeZone != "" {
+ cronExpression = cronExpression + " " + timeZone
+ }
}
+ updatedReplicationSchedule = cronExpression
+ } else {
+ updatedReplicationSchedule = fmt.Sprintf("%d MINUTE", replicationSchedule["interval"].(int))
+ }
+ err := client.FailoverGroups.AlterSource(ctx, id, &sdk.AlterSourceFailoverGroupOptions{
+ Set: &sdk.FailoverGroupSet{
+ ReplicationSchedule: sdk.String(updatedReplicationSchedule),
+ },
+ })
+ if err != nil {
+ return err
}
- updatedReplicationSchedule = cronExpression
} else {
- updatedReplicationSchedule = fmt.Sprintf("%d MINUTE", replicationSchedule["interval"].(int))
- }
-
- err := client.FailoverGroups.AlterSource(ctx, id, &sdk.AlterSourceFailoverGroupOptions{
- Set: &sdk.FailoverGroupSet{
- ReplicationSchedule: sdk.String(updatedReplicationSchedule),
- },
- })
- if err != nil {
- return err
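+ // The replication_schedule block was removed from the configuration, so unset it on the failover group.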
+ err := client.FailoverGroups.AlterSource(ctx, id, &sdk.AlterSourceFailoverGroupOptions{
+ Unset: &sdk.FailoverGroupUnset{
+ ReplicationSchedule: sdk.Bool(true),
+ },
+ })
+ if err != nil {
+ return err
+ }
}
}
diff --git a/pkg/resources/failover_group_acceptance_test.go b/pkg/resources/failover_group_acceptance_test.go
index 4d5efad688..f44e039ad6 100644
--- a/pkg/resources/failover_group_acceptance_test.go
+++ b/pkg/resources/failover_group_acceptance_test.go
@@ -169,6 +169,18 @@ func TestAcc_FailoverGroupInterval(t *testing.T) {
resource.TestCheckResourceAttr("snowflake_failover_group.fg", "replication_schedule.0.cron.0.time_zone", "UTC"),
),
},
+ // Remove replication schedule
+ {
+ Config: failoverGroupWithoutReplicationSchedule(randomCharacters, accountName, acc.TestDatabaseName),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr("snowflake_failover_group.fg", "name", randomCharacters),
+ resource.TestCheckResourceAttr("snowflake_failover_group.fg", "object_types.#", "4"),
+ resource.TestCheckResourceAttr("snowflake_failover_group.fg", "allowed_accounts.#", "1"),
+ resource.TestCheckResourceAttr("snowflake_failover_group.fg", "allowed_databases.#", "1"),
+ resource.TestCheckResourceAttr("snowflake_failover_group.fg", "allowed_integration_types.#", "1"),
+ resource.TestCheckResourceAttr("snowflake_failover_group.fg", "replication_schedule.#", "0"),
+ ),
+ },
// Change to Interval
{
Config: failoverGroupWithInterval(randomCharacters, accountName, 10, acc.TestDatabaseName),
@@ -289,6 +301,18 @@ resource "snowflake_failover_group" "fg" {
`, randomCharacters, accountName, databaseName, interval)
}
+func failoverGroupWithoutReplicationSchedule(randomCharacters, accountName string, databaseName string) string {
+ return fmt.Sprintf(`
+resource "snowflake_failover_group" "fg" {
+ name = "%s"
+ object_types = ["WAREHOUSES","DATABASES", "INTEGRATIONS", "ROLES"]
+ allowed_accounts= ["%s"]
+ allowed_databases = ["%s"]
+ allowed_integration_types = ["SECURITY INTEGRATIONS"]
+}
+`, randomCharacters, accountName, databaseName)
+}
+
func failoverGroupWithNoWarehouse(randomCharacters, accountName string, interval int) string {
return fmt.Sprintf(`
resource "snowflake_failover_group" "fg" {
diff --git a/pkg/resources/resource_monitor.go b/pkg/resources/resource_monitor.go
index 3376b2e225..a79f52d245 100644
--- a/pkg/resources/resource_monitor.go
+++ b/pkg/resources/resource_monitor.go
@@ -464,7 +464,7 @@ func collectResourceMonitorTriggers(d *schema.ResourceData) []sdk.TriggerDefinit
if v, ok := d.GetOk("suspend_immediate_triggers"); ok {
siTrigs := expandIntList(v.(*schema.Set).List())
for _, threshold := range siTrigs {
- if suspendImmediateTrigger == nil || suspendTrigger.Threshold > threshold {
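+ // suspendTrigger can be nil when only suspend_immediate_triggers are configured; guard the dereference to avoid a panic.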
+ if suspendImmediateTrigger == nil || (suspendTrigger != nil && suspendTrigger.Threshold > threshold) {
suspendImmediateTrigger = &sdk.TriggerDefinition{
Threshold: threshold,
TriggerAction: sdk.TriggerActionSuspendImmediate,
diff --git a/pkg/resources/stream.go b/pkg/resources/stream.go
index 294b95a9cf..7aa0701aaa 100644
--- a/pkg/resources/stream.go
+++ b/pkg/resources/stream.go
@@ -44,6 +44,7 @@ var streamSchema = map[string]*schema.Schema{
Description: "Specifies an identifier for the table the stream will monitor.",
ExactlyOneOf: []string{"on_table", "on_view", "on_stage"},
DiffSuppressFunc: suppressIdentifierQuoting,
+ ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
},
"on_view": {
Type: schema.TypeString,
@@ -52,6 +53,7 @@ var streamSchema = map[string]*schema.Schema{
Description: "Specifies an identifier for the view the stream will monitor.",
ExactlyOneOf: []string{"on_table", "on_view", "on_stage"},
DiffSuppressFunc: suppressIdentifierQuoting,
+ ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
},
"on_stage": {
Type: schema.TypeString,
@@ -63,6 +65,7 @@ var streamSchema = map[string]*schema.Schema{
// Suppress diff if the stage name is the same, even if database and schema are not specified
return strings.Trim(strings.Split(old, ".")[len(strings.Split(old, "."))-1], "\"") == strings.Trim(strings.Split(new, ".")[len(strings.Split(new, "."))-1], "\"")
},
+ ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
},
"append_only": {
Type: schema.TypeBool,
diff --git a/pkg/resources/table_constraint.go b/pkg/resources/table_constraint.go
index 0f0972902e..fbdd00dcdb 100644
--- a/pkg/resources/table_constraint.go
+++ b/pkg/resources/table_constraint.go
@@ -38,10 +38,11 @@ var tableConstraintSchema = map[string]*schema.Schema{
},
},
"table_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- Description: `Identifier for table to create constraint on. Format must follow: "\"<db_name>\".\"<schema_name>\".\"<table_name>\"" or "<db_name>.<schema_name>.<table_name>" (snowflake_table.my_table.id)`,
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ Description: `Identifier for table to create constraint on. Format must follow: "\"<db_name>\".\"<schema_name>\".\"<table_name>\"" or "<db_name>.<schema_name>.<table_name>" (snowflake_table.my_table.id)`,
+ ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
},
"columns": {
Type: schema.TypeList,
@@ -112,10 +113,10 @@ var tableConstraintSchema = map[string]*schema.Schema{
Schema: map[string]*schema.Schema{
"references": {
Type: schema.TypeList,
- Optional: true,
+ Required: true,
ForceNew: true,
MaxItems: 1,
- Description: "The table and columns that the foreign key references. Not applicable for primary/unique keys",
+ Description: "The table and columns that the foreign key references.",
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"table_id": {
diff --git a/pkg/resources/table_constraint_acceptance_test.go b/pkg/resources/table_constraint_acceptance_test.go
index 6030b7a0c8..9d1f65c371 100644
--- a/pkg/resources/table_constraint_acceptance_test.go
+++ b/pkg/resources/table_constraint_acceptance_test.go
@@ -237,12 +237,29 @@ func TestAcc_Table_issue2535_newConstraint(t *testing.T) {
Source: "Snowflake-Labs/snowflake",
},
},
- Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName),
+ Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName, "|"),
ExpectError: regexp.MustCompile(`.*table id is incorrect.*`),
},
+ {
+ ExternalProviders: map[string]resource.ExternalProvider{
+ "snowflake": {
+ VersionConstraint: "=0.89.0",
+ Source: "Snowflake-Labs/snowflake",
+ },
+ },
+ Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName, "|"),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttr("snowflake_table_constraint.unique", "type", "UNIQUE"),
+ ),
+ },
{
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
- Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName),
+ Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName, "|"),
+ ExpectError: regexp.MustCompile(`.*Expected SchemaObjectIdentifier identifier type, but got:.*`),
+ },
+ {
+ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+ Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName, "."),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_table_constraint.unique", "type", "UNIQUE"),
),
@@ -270,7 +287,7 @@ func TestAcc_Table_issue2535_existingTable(t *testing.T) {
Source: "Snowflake-Labs/snowflake",
},
},
- Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName),
+ Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName, "|"),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_table_constraint.unique", "type", "UNIQUE"),
),
@@ -332,6 +349,28 @@ func TestAcc_Table_issue2535_existingTable(t *testing.T) {
// })
//}
+func TestAcc_TableConstraint_ProperlyHandles_EmptyForeignKeyProperties(t *testing.T) {
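+ // With references now required, an empty foreign_key_properties block must fail validation, while a populated one should apply cleanly.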
+ id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
+
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+ PreCheck: func() { acc.TestAccPreCheck(t) },
+ TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+ tfversion.RequireAbove(tfversion.Version1_5_0),
+ },
+ CheckDestroy: nil,
+ Steps: []resource.TestStep{
+ {
+ Config: tableConstraintEmptyForeignKeyProperties(id.Name(), id.DatabaseName(), id.SchemaName()),
+ ExpectError: regexp.MustCompile(`At least 1 "references" blocks are required.`),
+ },
+ {
+ Config: tableConstraintForeignKeyProperties(id.Name(), id.DatabaseName(), id.SchemaName()),
+ },
+ },
+ })
+}
+
func tableConstraintUniqueConfigUsingFullyQualifiedName(n string, databaseName string, schemaName string) string {
return fmt.Sprintf(`
resource "snowflake_table" "t" {
@@ -354,12 +393,12 @@ resource "snowflake_table_constraint" "unique" {
`, n, databaseName, schemaName, n)
}
-func tableConstraintUniqueConfigUsingTableId(n string, databaseName string, schemaName string) string {
+func tableConstraintUniqueConfigUsingTableId(name string, databaseName string, schemaName string, idSeparator string) string {
return fmt.Sprintf(`
resource "snowflake_table" "t" {
- name = "%s"
- database = "%s"
- schema = "%s"
+ name = "%[1]s"
+ database = "%[2]s"
+ schema = "%[3]s"
column {
name = "col1"
@@ -368,33 +407,62 @@ resource "snowflake_table" "t" {
}
resource "snowflake_table_constraint" "unique" {
- name = "%s"
+ name = "%[1]s"
type = "UNIQUE"
- table_id = snowflake_table.t.id
+ table_id = "${snowflake_table.t.database}%[4]s${snowflake_table.t.schema}%[4]s${snowflake_table.t.name}"
columns = ["col1"]
}
-`, n, databaseName, schemaName, n)
+`, name, databaseName, schemaName, idSeparator)
}
-func tableConstraintWithNameAndComment(databaseName string, schemaName string, name string, comment string) string {
+func tableConstraintEmptyForeignKeyProperties(name string, databaseName string, schemaName string) string {
return fmt.Sprintf(`
-resource "snowflake_table" "t" {
- name = "%s"
- database = "%s"
- schema = "%s"
-
- column {
- name = "col1"
- type = "NUMBER(38,0)"
+ resource "snowflake_table" "t" {
+ name = "%[3]s"
+ database = "%[1]s"
+ schema = "%[2]s"
+
+ column {
+ name = "col1"
+ type = "NUMBER(38,0)"
+ }
+ }
+
+ resource "snowflake_table_constraint" "unique" {
+ name = "%[3]s"
+ type = "FOREIGN KEY"
+ table_id = snowflake_table.t.qualified_name
+ columns = ["col1"]
+ foreign_key_properties {
+ }
}
+ `, databaseName, schemaName, name)
}
-resource "snowflake_table_constraint" "test" {
- name = "%s"
- comment = "%s"
- type = "UNIQUE"
- table_id = snowflake_table.t.id
- columns = ["col1"]
-}
-`, name, databaseName, schemaName, name, comment)
+func tableConstraintForeignKeyProperties(name string, databaseName string, schemaName string) string {
+ return fmt.Sprintf(`
+ resource "snowflake_table" "t" {
+ name = "%[3]s"
+ database = "%[1]s"
+ schema = "%[2]s"
+
+ column {
+ name = "col1"
+ type = "NUMBER(38,0)"
+ }
+ }
+
+ resource "snowflake_table_constraint" "unique" {
+ name = "%[3]s"
+ type = "FOREIGN KEY"
+ table_id = snowflake_table.t.qualified_name
+ columns = ["col1"]
+ foreign_key_properties {
+ references {
+ columns = ["col1"]
+ table_id = snowflake_table.t.qualified_name
+ }
+ }
+ }
+ `, databaseName, schemaName, name)
}
diff --git a/pkg/resources/tag_masking_policy_association.go b/pkg/resources/tag_masking_policy_association.go
index ce55dfdff1..f8f570f207 100644
--- a/pkg/resources/tag_masking_policy_association.go
+++ b/pkg/resources/tag_masking_policy_association.go
@@ -23,16 +23,18 @@ const (
var mpAttachmentPolicySchema = map[string]*schema.Schema{
"tag_id": {
- Type: schema.TypeString,
- Required: true,
- Description: "Specifies the identifier for the tag. Note: format must follow: \"databaseName\".\"schemaName\".\"tagName\" or \"databaseName.schemaName.tagName\" or \"databaseName|schemaName.tagName\" (snowflake_tag.tag.id)",
- ForceNew: true,
+ Type: schema.TypeString,
+ Required: true,
+ Description: "Specifies the identifier for the tag. Note: format must follow: \"databaseName\".\"schemaName\".\"tagName\" or \"databaseName.schemaName.tagName\" or \"databaseName|schemaName.tagName\" (snowflake_tag.tag.id)",
+ ForceNew: true,
+ ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
},
"masking_policy_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- Description: "The resource id of the masking policy",
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ Description: "The resource id of the masking policy",
+ ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
},
}
@@ -88,22 +90,32 @@ func TagMaskingPolicyAssociation() *schema.Resource {
func CreateContextTagMaskingPolicyAssociation(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
client := meta.(*provider.Context).Client
+
value := d.Get("tag_id").(string)
- tid := helpers.DecodeSnowflakeID(value).(sdk.SchemaObjectIdentifier)
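+ // tag_id is now expected to be a fully qualified, dot-separated identifier (see the migration guide); parse it into a SchemaObjectIdentifier.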
+ tagObjectIdentifier, err := helpers.DecodeSnowflakeParameterID(value)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ tagId := tagObjectIdentifier.(sdk.SchemaObjectIdentifier)
+
value = d.Get("masking_policy_id").(string)
- mid := helpers.DecodeSnowflakeID(value).(sdk.SchemaObjectIdentifier)
+ maskingPolicyObjectIdentifier, err := helpers.DecodeSnowflakeParameterID(value)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ maskingPolicyId := maskingPolicyObjectIdentifier.(sdk.SchemaObjectIdentifier)
- set := sdk.NewTagSetRequest().WithMaskingPolicies([]sdk.SchemaObjectIdentifier{mid})
- if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(tid).WithSet(set)); err != nil {
+ set := sdk.NewTagSetRequest().WithMaskingPolicies([]sdk.SchemaObjectIdentifier{maskingPolicyId})
+ if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(tagId).WithSet(set)); err != nil {
return diag.FromErr(err)
}
aid := attachmentID{
- TagDatabaseName: tid.DatabaseName(),
- TagSchemaName: tid.SchemaName(),
- TagName: tid.Name(),
- MaskingPolicyDatabaseName: mid.DatabaseName(),
- MaskingPolicySchemaName: mid.SchemaName(),
- MaskingPolicyName: mid.Name(),
+ TagDatabaseName: tagId.DatabaseName(),
+ TagSchemaName: tagId.SchemaName(),
+ TagName: tagId.Name(),
+ MaskingPolicyDatabaseName: maskingPolicyId.DatabaseName(),
+ MaskingPolicySchemaName: maskingPolicyId.SchemaName(),
+ MaskingPolicyName: maskingPolicyId.Name(),
}
fmt.Printf("attachment id: %s\n", aid.String())
d.SetId(aid.String())
@@ -145,7 +157,7 @@ func ReadContextTagMaskingPolicyAssociation(ctx context.Context, d *schema.Resou
mid := sdk.NewSchemaObjectIdentifier(aid.MaskingPolicyDatabaseName, aid.MaskingPolicySchemaName, aid.MaskingPolicyName)
builder := snowflake.NewTagBuilder(tid).WithMaskingPolicy(mid)
row := snowflake.QueryRow(db, builder.ShowAttachedPolicy())
- t, err := snowflake.ScanTagPolicy(row)
+ _, err = snowflake.ScanTagPolicy(row)
if errors.Is(err, sql.ErrNoRows) {
// If not found, mark resource to be removed from state file during apply or refresh
log.Printf("[DEBUG] attached policy (%s) not found", d.Id())
@@ -155,10 +167,7 @@ func ReadContextTagMaskingPolicyAssociation(ctx context.Context, d *schema.Resou
if err != nil {
return diag.FromErr(err)
}
- id := helpers.EncodeSnowflakeID(t.PolicyDB.String, t.PolicySchema.String, t.PolicyName.String)
- if err := d.Set("masking_policy_id", id); err != nil {
- return diag.FromErr(err)
- }
+
return diags
}
diff --git a/pkg/resources/tag_masking_policy_association_acceptance_test.go b/pkg/resources/tag_masking_policy_association_acceptance_test.go
index d462e5555c..06d51bc637 100644
--- a/pkg/resources/tag_masking_policy_association_acceptance_test.go
+++ b/pkg/resources/tag_masking_policy_association_acceptance_test.go
@@ -24,8 +24,8 @@ func TestAcc_TagMaskingPolicyAssociationBasic(t *testing.T) {
{
Config: tagAttachmentConfig(accName, acc.TestDatabaseName, acc.TestSchemaName),
Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "masking_policy_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, accName)),
- resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, accName)),
+ resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "masking_policy_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, accName)),
+ resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, accName)),
),
},
},
@@ -65,11 +65,10 @@ func TestAcc_TagMaskingPolicyAssociationsystem_functions_integration_testComplet
resource.TestCheckResourceAttr(resourceName, "allowed_values.1", "alv2"),
resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test"),
- resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "masking_policy_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, name)),
- resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, name)),
+ resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "masking_policy_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, name)),
+ resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, name)),
),
},
-
// test - change comment
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_TagMaskingPolicyAssociation/basic"),
@@ -81,7 +80,6 @@ func TestAcc_TagMaskingPolicyAssociationsystem_functions_integration_testComplet
resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test - updated"),
),
},
-
// test - import
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_TagMaskingPolicyAssociation/basic"),
@@ -120,8 +118,8 @@ resource "snowflake_masking_policy" "test" {
}
resource "snowflake_tag_masking_policy_association" "test" {
- tag_id = snowflake_tag.test.id
- masking_policy_id = snowflake_masking_policy.test.id
+ tag_id = "${snowflake_tag.test.database}.${snowflake_tag.test.schema}.${snowflake_tag.test.name}"
+ masking_policy_id = "${snowflake_masking_policy.test.database}.${snowflake_masking_policy.test.schema}.${snowflake_masking_policy.test.name}"
}
`, n, databaseName, schemaName)
}
diff --git a/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf b/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf
index be44d85425..0851584507 100644
--- a/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf
+++ b/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf
@@ -22,6 +22,6 @@ resource "snowflake_masking_policy" "test" {
}
resource "snowflake_tag_masking_policy_association" "test" {
- tag_id = snowflake_tag.test.id
- masking_policy_id = snowflake_masking_policy.test.id
+ tag_id = "${snowflake_tag.test.database}.${snowflake_tag.test.schema}.${snowflake_tag.test.name}"
+ masking_policy_id = "${snowflake_masking_policy.test.database}.${snowflake_masking_policy.test.schema}.${snowflake_masking_policy.test.name}"
}
diff --git a/pkg/sdk/failover_groups.go b/pkg/sdk/failover_groups.go
index 094ae6bf36..9ff342b2c8 100644
--- a/pkg/sdk/failover_groups.go
+++ b/pkg/sdk/failover_groups.go
@@ -144,6 +144,7 @@ type AlterSourceFailoverGroupOptions struct {
name AccountObjectIdentifier `ddl:"identifier"`
NewName AccountObjectIdentifier `ddl:"identifier" sql:"RENAME TO"`
Set *FailoverGroupSet `ddl:"keyword" sql:"SET"`
+ Unset *FailoverGroupUnset `ddl:"list,no_parentheses" sql:"UNSET"`
Add *FailoverGroupAdd `ddl:"keyword" sql:"ADD"`
Move *FailoverGroupMove `ddl:"keyword" sql:"MOVE"`
Remove *FailoverGroupRemove `ddl:"keyword" sql:"REMOVE"`
@@ -157,14 +158,19 @@ func (opts *AlterSourceFailoverGroupOptions) validate() error {
if !ValidObjectIdentifier(opts.name) {
errs = append(errs, ErrInvalidObjectIdentifier)
}
- if !exactlyOneValueSet(opts.Set, opts.Add, opts.Move, opts.Remove, opts.NewName) {
- errs = append(errs, errExactlyOneOf("AlterSourceFailoverGroupOptions", "Set", "Add", "Move", "Remove", "NewName"))
+ if !exactlyOneValueSet(opts.Set, opts.Unset, opts.Add, opts.Move, opts.Remove, opts.NewName) {
+ errs = append(errs, errExactlyOneOf("AlterSourceFailoverGroupOptions", "Set", "Unset", "Add", "Move", "Remove", "NewName"))
}
if valueSet(opts.Set) {
if err := opts.Set.validate(); err != nil {
errs = append(errs, err)
}
}
+ if valueSet(opts.Unset) {
+ if err := opts.Unset.validate(); err != nil {
+ errs = append(errs, err)
+ }
+ }
if valueSet(opts.Add) {
if err := opts.Add.validate(); err != nil {
errs = append(errs, err)
@@ -199,6 +205,17 @@ func (v *FailoverGroupSet) validate() error {
return nil
}
+type FailoverGroupUnset struct {
+ ReplicationSchedule *bool `ddl:"keyword" sql:"REPLICATION_SCHEDULE"`
+}
+
+func (v *FailoverGroupUnset) validate() error {
+ if everyValueNil(v.ReplicationSchedule) {
+ return errAtLeastOneOf("FailoverGroupUnset", "ReplicationSchedule")
+ }
+ return nil
+}
+
type FailoverGroupAdd struct {
AllowedDatabases []AccountObjectIdentifier `ddl:"parameter,reverse" sql:"TO ALLOWED_DATABASES"`
AllowedShares []AccountObjectIdentifier `ddl:"parameter,reverse" sql:"TO ALLOWED_SHARES"`
diff --git a/pkg/sdk/failover_groups_test.go b/pkg/sdk/failover_groups_test.go
index 4463890c99..9a7e6e1850 100644
--- a/pkg/sdk/failover_groups_test.go
+++ b/pkg/sdk/failover_groups_test.go
@@ -55,6 +55,24 @@ func TestCreateSecondaryReplicationGroup(t *testing.T) {
func TestFailoverGroupAlterSource(t *testing.T) {
id := NewAccountObjectIdentifier("fg1")
+ t.Run("validate: empty unset", func(t *testing.T) {
+ opts := &AlterSourceFailoverGroupOptions{
+ name: id,
+ Unset: &FailoverGroupUnset{},
+ }
+ assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("FailoverGroupUnset", "ReplicationSchedule"))
+ })
+
+ t.Run("unset replication schedule", func(t *testing.T) {
+ opts := &AlterSourceFailoverGroupOptions{
+ name: id,
+ Unset: &FailoverGroupUnset{
+ ReplicationSchedule: Bool(true),
+ },
+ }
+ assertOptsValidAndSQLEquals(t, opts, `ALTER FAILOVER GROUP "fg1" UNSET REPLICATION_SCHEDULE`)
+ })
+
t.Run("rename", func(t *testing.T) {
opts := &AlterSourceFailoverGroupOptions{
name: id,
diff --git a/pkg/sdk/testint/failover_groups_integration_test.go b/pkg/sdk/testint/failover_groups_integration_test.go
index c9571a153b..0ff27469cf 100644
--- a/pkg/sdk/testint/failover_groups_integration_test.go
+++ b/pkg/sdk/testint/failover_groups_integration_test.go
@@ -341,6 +341,7 @@ func TestInt_FailoverGroupsAlterSource(t *testing.T) {
failoverGroup, cleanupFailoverGroup := testClientHelper().FailoverGroup.CreateFailoverGroup(t)
t.Cleanup(cleanupFailoverGroup)
replicationSchedule := "USING CRON 0 0 10-20 * TUE,THU UTC"
+
opts := &sdk.AlterSourceFailoverGroupOptions{
Set: &sdk.FailoverGroupSet{
ReplicationSchedule: &replicationSchedule,
@@ -348,9 +349,22 @@ func TestInt_FailoverGroupsAlterSource(t *testing.T) {
}
err := client.FailoverGroups.AlterSource(ctx, failoverGroup.ID(), opts)
require.NoError(t, err)
+
failoverGroup, err = client.FailoverGroups.ShowByID(ctx, failoverGroup.ID())
require.NoError(t, err)
assert.Equal(t, replicationSchedule, failoverGroup.ReplicationSchedule)
+
+ opts = &sdk.AlterSourceFailoverGroupOptions{
+ Unset: &sdk.FailoverGroupUnset{
+ ReplicationSchedule: sdk.Bool(true),
+ },
+ }
+ err = client.FailoverGroups.AlterSource(ctx, failoverGroup.ID(), opts)
+ require.NoError(t, err)
+
+ failoverGroup, err = client.FailoverGroups.ShowByID(ctx, failoverGroup.ID())
+ require.NoError(t, err)
+ require.Empty(t, failoverGroup.ReplicationSchedule)
})
t.Run("add and remove database account object", func(t *testing.T) {