diff --git a/pkg/acceptance/testing.go b/pkg/acceptance/testing.go
index ed526c3937..a9601b1e5b 100644
--- a/pkg/acceptance/testing.go
+++ b/pkg/acceptance/testing.go
@@ -2,6 +2,8 @@ package acceptance

 import (
 	"context"
+	"path/filepath"
+	"strconv"
 	"sync"
 	"testing"

@@ -10,6 +12,7 @@ import (
 	"github.com/hashicorp/terraform-plugin-go/tfprotov6"
 	"github.com/hashicorp/terraform-plugin-mux/tf5to6server"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/hashicorp/terraform-plugin-testing/config"
 )

 const (
@@ -72,3 +75,11 @@ func TestAccPreCheck(t *testing.T) {
 		}
 	})
 }
+
+// ConfigurationSameAsStepN should be used to obtain the configuration of one of the previous steps, to avoid duplicating configuration and variable files.
+// Based on config.TestStepDirectory.
+func ConfigurationSameAsStepN(step int) func(config.TestStepConfigRequest) string {
+	return func(req config.TestStepConfigRequest) string {
+		return filepath.Join("testdata", req.TestName, strconv.Itoa(step))
+	}
+}
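A minimal sketch of what ConfigurationSameAsStepN resolves to (the test name here is hypothetical, and filepath.Join uses the OS path separator, so the printed output assumes a Unix-like system):

package acceptance_test

import (
	"fmt"

	acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance"
	"github.com/hashicorp/terraform-plugin-testing/config"
)

// Step N of a test reuses the config directory of an earlier step M by
// passing acc.ConfigurationSameAsStepN(M) as that step's ConfigDirectory.
func ExampleConfigurationSameAsStepN() {
	dir := acc.ConfigurationSameAsStepN(2)(config.TestStepConfigRequest{TestName: "TestAcc_DynamicTable_issue2173"})
	fmt.Println(dir)
	// Output: testdata/TestAcc_DynamicTable_issue2173/2
}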
diff --git a/pkg/resources/common.go b/pkg/resources/common.go
new file mode 100644
index 0000000000..909306f301
--- /dev/null
+++ b/pkg/resources/common.go
@@ -0,0 +1,25 @@
+package resources
+
+import (
+	"strings"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+// DiffSuppressStatement will suppress diffs between statements if they differ only in case or in
+// runs of whitespace (\s+ = \s). This is needed because the Snowflake API does not faithfully
+// round-trip queries, so we cannot do a simple character-wise comparison to detect changes.
+//
+// Warning: we will have false positives in cases where a change in case or in a run of whitespace is
+// semantically significant.
+//
+// If we can find a SQL parser that can handle the Snowflake dialect, then we should switch to parsing
+// queries and either comparing ASTs or emitting a canonical serialization for comparison. I couldn't
+// find such a library.
+func DiffSuppressStatement(_, old, new string, _ *schema.ResourceData) bool {
+	return strings.EqualFold(normalizeQuery(old), normalizeQuery(new))
+}
+
+func normalizeQuery(str string) string {
+	return strings.TrimSpace(space.ReplaceAllString(str, " "))
+}
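A quick illustration of the suppression semantics documented above — a sketch, not part of this change (the queries are made up; note that normalizeQuery relies on the package-level space regexp that remains behind in view.go, in the same package):

package resources_test

import (
	"fmt"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources"
)

func ExampleDiffSuppressStatement() {
	old := "SELECT \"id\"   FROM \"db\".\"schema\".\"t\""
	new := "\tselect \"id\" from \"db\".\"schema\".\"t\""
	// Runs of whitespace collapse to a single space, leading/trailing
	// whitespace is trimmed, and the comparison is case-insensitive,
	// so no diff is reported for these two statements.
	fmt.Println(resources.DiffSuppressStatement("query", old, new, nil))
	// Output: true
}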
diff --git a/pkg/resources/dynamic_table.go b/pkg/resources/dynamic_table.go
index a52852102a..3b452ba061 100644
--- a/pkg/resources/dynamic_table.go
+++ b/pkg/resources/dynamic_table.go
@@ -63,9 +63,10 @@ var dynamicTableShema = map[string]*schema.Schema{
 		ForceNew:    true,
 	},
 	"query": {
-		Type:        schema.TypeString,
-		Required:    true,
-		Description: "Specifies the query to use to populate the dynamic table.",
+		Type:             schema.TypeString,
+		Required:         true,
+		Description:      "Specifies the query to use to populate the dynamic table.",
+		DiffSuppressFunc: DiffSuppressStatement,
 	},
 	"comment": {
 		Type:        schema.TypeString,
@@ -298,20 +299,25 @@ func UpdateDynamicTable(d *schema.ResourceData, meta interface{}) error {
 	id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier)
 	request := sdk.NewAlterDynamicTableRequest(id)

+	runSet := false
 	set := sdk.NewDynamicTableSetRequest()
 	if d.HasChange("target_lag") {
 		tl := parseTargetLag(d.Get("target_lag"))
 		set.WithTargetLag(tl)
+		runSet = true
 	}

 	if d.HasChange("warehouse") {
 		warehouseName := d.Get("warehouse").(string)
 		set.WithWarehouse(sdk.NewAccountObjectIdentifier(warehouseName))
+		runSet = true
 	}
-	request.WithSet(set)
-	if err := client.DynamicTables.Alter(ctx, request); err != nil {
-		return err
+	if runSet {
+		request.WithSet(set)
+		if err := client.DynamicTables.Alter(ctx, request); err != nil {
+			return err
+		}
 	}

 	if d.HasChange("comment") {
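Why the runSet guard above matters: previously every update, including a comment-only change, unconditionally issued ALTER DYNAMIC TABLE ... SET, and with nothing to set Snowflake appears to reject the statement (the `syntax error ... unexpected ''` reproduced in TestAcc_DynamicTable_issue2134 below). A self-contained toy sketch of the pattern — none of these names come from the provider:

package main

import (
	"errors"
	"fmt"
)

// fakeAlter stands in for the Snowflake ALTER call; an empty SET clause is
// a server-side syntax error, roughly like the one quoted in the test below.
func fakeAlter(setClauses []string) error {
	if len(setClauses) == 0 {
		return errors.New("001003 (42000): SQL compilation error: syntax error unexpected ''")
	}
	fmt.Printf("ALTER DYNAMIC TABLE t SET %v\n", setClauses)
	return nil
}

func main() {
	var setClauses []string // this update changed nothing that belongs in SET
	if runSet := len(setClauses) > 0; runSet {
		if err := fakeAlter(setClauses); err != nil {
			fmt.Println(err)
		}
	}
	fmt.Println("update finished without issuing an empty ALTER ... SET")
}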
diff --git a/pkg/resources/dynamic_table_acceptance_test.go b/pkg/resources/dynamic_table_acceptance_test.go
index 1e661c304e..34edb137cb 100644
--- a/pkg/resources/dynamic_table_acceptance_test.go
+++ b/pkg/resources/dynamic_table_acceptance_test.go
@@ -13,6 +13,7 @@ import (
 	"github.com/hashicorp/terraform-plugin-testing/config"
 	"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
 	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
+	"github.com/hashicorp/terraform-plugin-testing/plancheck"
 	"github.com/hashicorp/terraform-plugin-testing/terraform"
 	"github.com/hashicorp/terraform-plugin-testing/tfversion"
 )
@@ -99,6 +100,142 @@ func TestAcc_DynamicTable_basic(t *testing.T) {
 	})
 }

+// TestAcc_DynamicTable_issue2173 reproduces https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2173.
+func TestAcc_DynamicTable_issue2173(t *testing.T) {
+	dynamicTableName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))
+	tableName := dynamicTableName + "_table"
+	query := fmt.Sprintf(`select "id" from "%v"."%v"."%v"`, acc.TestDatabaseName, acc.TestSchemaName, tableName)
+	otherSchema := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))
+	m := func() map[string]config.Variable {
+		return map[string]config.Variable{
+			"name":         config.StringVariable(dynamicTableName),
+			"database":     config.StringVariable(acc.TestDatabaseName),
+			"schema":       config.StringVariable(acc.TestSchemaName),
+			"warehouse":    config.StringVariable(acc.TestWarehouseName),
+			"query":        config.StringVariable(query),
+			"comment":      config.StringVariable("Terraform acceptance test for GH issue 2173"),
+			"table_name":   config.StringVariable(tableName),
+			"other_schema": config.StringVariable(otherSchema),
+		}
+	}
+
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		PreCheck:                 func() { acc.TestAccPreCheck(t) },
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
+		CheckDestroy: testAccCheckDynamicTableDestroy,
+		Steps: []resource.TestStep{
+			{
+				ConfigDirectory: config.TestStepDirectory(),
+				ConfigVariables: m(),
+				ConfigPlanChecks: resource.ConfigPlanChecks{
+					PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()},
+				},
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr("snowflake_schema.other_schema", "name", otherSchema),
+					resource.TestCheckResourceAttr("snowflake_table.t", "name", tableName),
+				),
+			},
+			{
+				PreConfig:       func() { createDynamicTableOutsideTerraform(t, otherSchema, dynamicTableName, query) },
+				ConfigDirectory: config.TestStepDirectory(),
+				ConfigVariables: m(),
+				ConfigPlanChecks: resource.ConfigPlanChecks{
+					PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()},
+				},
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr("snowflake_dynamic_table.dt", "name", dynamicTableName),
+				),
+			},
+			{
+				// We use the same config here as in the previous step, so the plan should be empty.
+				ConfigDirectory: acc.ConfigurationSameAsStepN(2),
+				ConfigVariables: m(),
+				ConfigPlanChecks: resource.ConfigPlanChecks{
+					/*
+					 * Before the fix this step resulted in
+					 * # snowflake_dynamic_table.dt will be updated in-place
+					 * ~ resource "snowflake_dynamic_table" "dt" {
+					 *     + comment = "Terraform acceptance test for GH issue 2173"
+					 *       id      = "terraform_test_database|terraform_test_schema|SFVNXKJFAA"
+					 *       name    = "SFVNXKJFAA"
+					 *     ~ schema  = "MEYIYWUGGO" -> "terraform_test_schema"
+					 *     # (14 unchanged attributes hidden)
+					 *   }
+					 * which matches the issue description exactly (the issue also mentioned the query changing, but for simplicity the same underlying table and query are used here).
+					 */
+					PreApply: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()},
+				},
+			},
+		},
+	})
+}
+
+// TestAcc_DynamicTable_issue2134 reproduces https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2134.
+func TestAcc_DynamicTable_issue2134(t *testing.T) {
+	dynamicTableName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))
+	tableName := dynamicTableName + "_table"
+	// whitespace (an initial tab) is added on purpose here
+	query := fmt.Sprintf(`	select "id" from "%v"."%v"."%v"`, acc.TestDatabaseName, acc.TestSchemaName, tableName)
+	m := func() map[string]config.Variable {
+		return map[string]config.Variable{
+			"name":       config.StringVariable(dynamicTableName),
+			"database":   config.StringVariable(acc.TestDatabaseName),
+			"schema":     config.StringVariable(acc.TestSchemaName),
+			"warehouse":  config.StringVariable(acc.TestWarehouseName),
+			"query":      config.StringVariable(query),
+			"comment":    config.StringVariable("Terraform acceptance test for GH issue 2134"),
+			"table_name": config.StringVariable(tableName),
+		}
+	}
+	m2 := m()
+	m2["comment"] = config.StringVariable("Changed comment")
+
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		PreCheck:                 func() { acc.TestAccPreCheck(t) },
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
+		CheckDestroy: testAccCheckDynamicTableDestroy,
+		Steps: []resource.TestStep{
+			/*
+			 * Before the fix, the first step resulted in a non-empty plan (as expected):
+			 * # snowflake_dynamic_table.dt will be updated in-place
+			 * ~ resource "snowflake_dynamic_table" "dt" {
+			 *       id    = "terraform_test_database|terraform_test_schema|IKLBYWKSOV"
+			 *       name  = "IKLBYWKSOV"
+			 *     ~ query = "select \"id\" from \"terraform_test_database\".\"terraform_test_schema\".\"IKLBYWKSOV_table\"" -> "\tselect \"id\" from \"terraform_test_database\".\"terraform_test_schema\".\"IKLBYWKSOV_table\""
+			 *     # (15 unchanged attributes hidden)
+			 *   }
+			 * which matches the issue description exactly.
+			 */
+			{
+				ConfigDirectory: config.TestStepDirectory(),
+				ConfigVariables: m(),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr("snowflake_dynamic_table.dt", "name", dynamicTableName),
+				),
+			},
+			/*
+			 * Before the fix, the second step resulted in a SQL error (as expected):
+			 * Error: 001003 (42000): SQL compilation error:
+			 * syntax error line 1 at position 86 unexpected ''.
+			 * which matches the issue description exactly.
+			 */
+			{
+				ConfigDirectory: acc.ConfigurationSameAsStepN(1),
+				ConfigVariables: m2,
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr("snowflake_dynamic_table.dt", "name", dynamicTableName),
+				),
+			},
+		},
+	})
+}
+
 func testAccCheckDynamicTableDestroy(s *terraform.State) error {
 	db := acc.TestAccProvider.Meta().(*sql.DB)
 	client := sdk.NewClientFromDB(db)
@@ -115,3 +252,17 @@ func testAccCheckDynamicTableDestroy(s *terraform.State) error {
 	}
 	return nil
 }
+
+func createDynamicTableOutsideTerraform(t *testing.T, schemaName string, dynamicTableName string, query string) {
+	t.Helper()
+	client, err := sdk.NewDefaultClient()
+	if err != nil {
+		t.Fatal(err)
+	}
+	ctx := context.Background()
+
+	dynamicTableId := sdk.NewSchemaObjectIdentifier(acc.TestDatabaseName, schemaName, dynamicTableName)
+	if err := client.DynamicTables.Create(ctx, sdk.NewCreateDynamicTableRequest(dynamicTableId, sdk.NewAccountObjectIdentifier(acc.TestWarehouseName), sdk.TargetLag{MaximumDuration: sdk.String("2 minutes")}, query)); err != nil {
+		t.Fatal(fmt.Errorf("error creating dynamic table: %w", err))
+	}
+}
diff --git a/pkg/resources/file_format.go b/pkg/resources/file_format.go
index 9caab6e0d1..1433783155 100644
--- a/pkg/resources/file_format.go
+++ b/pkg/resources/file_format.go
@@ -352,7 +352,9 @@ func CreateFileFormat(d *schema.ResourceData, meta interface{}) error {
 			opts.CSVFileExtension = sdk.String(v.(string))
 		}
 		opts.CSVParseHeader = sdk.Bool(d.Get("parse_header").(bool))
-		opts.CSVSkipHeader = sdk.Int(d.Get("skip_header").(int))
+		if v, ok := d.GetOk("skip_header"); ok {
+			opts.CSVSkipHeader = sdk.Int(v.(int))
+		}
 		opts.CSVSkipBlankLines = sdk.Bool(d.Get("skip_blank_lines").(bool))
 		if v, ok := d.GetOk("date_format"); ok {
 			opts.CSVDateFormat = sdk.String(v.(string))
@@ -778,6 +780,7 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error {
 		id = newId
 	}

+	runSet := false
 	opts := sdk.AlterFileFormatOptions{}

 	switch d.Get("format_type") {
@@ -785,62 +788,77 @@
 		if d.HasChange("compression") {
 			v := sdk.CSVCompression(d.Get("compression").(string))
 			opts.Set.CSVCompression = &v
+			runSet = true
 		}
 		if d.HasChange("record_delimiter") {
 			v := d.Get("record_delimiter").(string)
 			opts.Set.CSVRecordDelimiter = &v
+			runSet = true
 		}
 		if d.HasChange("field_delimiter") {
 			v := d.Get("field_delimiter").(string)
 			opts.Set.CSVFieldDelimiter = &v
+			runSet = true
 		}
 		if d.HasChange("file_extension") {
 			v := d.Get("file_extension").(string)
 			opts.Set.CSVFileExtension = &v
+			runSet = true
 		}
 		if d.HasChange("parse_header") {
 			v := d.Get("parse_header").(bool)
 			opts.Set.CSVParseHeader = &v
+			runSet = true
 		}
 		if d.HasChange("skip_header") {
 			v := d.Get("skip_header").(int)
 			opts.Set.CSVSkipHeader = &v
+			runSet = true
 		}
 		if d.HasChange("skip_blank_lines") {
 			v := d.Get("skip_blank_lines").(bool)
 			opts.Set.CSVSkipBlankLines = &v
+			runSet = true
 		}
 		if d.HasChange("date_format") {
 			v := d.Get("date_format").(string)
 			opts.Set.CSVDateFormat = &v
+			runSet = true
 		}
 		if d.HasChange("time_format") {
 			v := d.Get("time_format").(string)
 			opts.Set.CSVTimeFormat = &v
+			runSet = true
 		}
 		if d.HasChange("timestamp_format") {
 			v := d.Get("timestamp_format").(string)
 			opts.Set.CSVTimestampFormat = &v
+			runSet = true
 		}
 		if d.HasChange("binary_format") {
 			v := sdk.BinaryFormat(d.Get("binary_format").(string))
 			opts.Set.CSVBinaryFormat = &v
+			runSet = true
 		}
 		if d.HasChange("escape") {
 			v := d.Get("escape").(string)
 			opts.Set.CSVEscape = &v
+			runSet = true
 		}
 		if d.HasChange("escape_unenclosed_field") {
 			v := d.Get("escape_unenclosed_field").(string)
 			opts.Set.CSVEscapeUnenclosedField = &v
+			runSet = true
 		}
 		if d.HasChange("trim_space") {
 			v := d.Get("trim_space").(bool)
 			opts.Set.CSVTrimSpace = &v
+			runSet = true
 		}
 		if d.HasChange("field_optionally_enclosed_by") {
 			v := d.Get("field_optionally_enclosed_by").(string)
 			opts.Set.CSVFieldOptionallyEnclosedBy = &v
+			runSet = true
 		}
 		if d.HasChange("null_if") {
 			nullIf := []sdk.NullString{}
@@ -853,51 +871,63 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error {
 				nullIf = append(nullIf, sdk.NullString{S: s.(string)})
 			}
 			opts.Set.CSVNullIf = &nullIf
+			runSet = true
 		}
 		if d.HasChange("error_on_column_count_mismatch") {
 			v := d.Get("error_on_column_count_mismatch").(bool)
 			opts.Set.CSVErrorOnColumnCountMismatch = &v
+			runSet = true
 		}
 		if d.HasChange("replace_invalid_characters") {
 			v := d.Get("replace_invalid_characters").(bool)
 			opts.Set.CSVReplaceInvalidCharacters = &v
+			runSet = true
 		}
 		if d.HasChange("empty_field_as_null") {
 			v := d.Get("empty_field_as_null").(bool)
 			opts.Set.CSVEmptyFieldAsNull = &v
+			runSet = true
 		}
 		if d.HasChange("skip_byte_order_mark") {
 			v := d.Get("skip_byte_order_mark").(bool)
 			opts.Set.CSVSkipByteOrderMark = &v
+			runSet = true
 		}
 		if d.HasChange("encoding") {
 			v := sdk.CSVEncoding(d.Get("encoding").(string))
 			opts.Set.CSVEncoding = &v
+			runSet = true
 		}
 	case sdk.FileFormatTypeJSON:
 		if d.HasChange("compression") {
 			comp := sdk.JSONCompression(d.Get("compression").(string))
 			opts.Set.JSONCompression = &comp
+			runSet = true
 		}
 		if d.HasChange("date_format") {
 			v := d.Get("date_format").(string)
 			opts.Set.JSONDateFormat = &v
+			runSet = true
 		}
 		if d.HasChange("time_format") {
 			v := d.Get("time_format").(string)
 			opts.Set.JSONTimeFormat = &v
+			runSet = true
 		}
 		if d.HasChange("timestamp_format") {
 			v := d.Get("timestamp_format").(string)
 			opts.Set.JSONTimestampFormat = &v
+			runSet = true
 		}
 		if d.HasChange("binary_format") {
 			v := sdk.BinaryFormat(d.Get("binary_format").(string))
 			opts.Set.JSONBinaryFormat = &v
+			runSet = true
 		}
 		if d.HasChange("trim_space") {
 			v := d.Get("trim_space").(bool)
 			opts.Set.JSONTrimSpace = &v
+			runSet = true
 		}
 		if d.HasChange("null_if") {
 			nullIf := []sdk.NullString{}
@@ -910,47 +940,58 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error {
 				nullIf = append(nullIf, sdk.NullString{S: s.(string)})
 			}
 			opts.Set.JSONNullIf = &nullIf
+			runSet = true
 		}
 		if d.HasChange("file_extension") {
 			v := d.Get("file_extension").(string)
 			opts.Set.JSONFileExtension = &v
+			runSet = true
 		}
 		if d.HasChange("enable_octal") {
 			v := d.Get("enable_octal").(bool)
 			opts.Set.JSONEnableOctal = &v
+			runSet = true
 		}
 		if d.HasChange("allow_duplicate") {
 			v := d.Get("allow_duplicate").(bool)
 			opts.Set.JSONAllowDuplicate = &v
+			runSet = true
 		}
 		if d.HasChange("strip_outer_array") {
 			v := d.Get("strip_outer_array").(bool)
 			opts.Set.JSONStripOuterArray = &v
+			runSet = true
 		}
 		if d.HasChange("strip_null_values") {
 			v := d.Get("strip_null_values").(bool)
 			opts.Set.JSONStripNullValues = &v
+			runSet = true
 		}
 		if d.HasChange("replace_invalid_characters") {
 			v := d.Get("replace_invalid_characters").(bool)
 			opts.Set.JSONReplaceInvalidCharacters = &v
+			runSet = true
 		}
 		if d.HasChange("ignore_utf8_errors") {
 			v := d.Get("ignore_utf8_errors").(bool)
 			opts.Set.JSONIgnoreUTF8Errors = &v
+			runSet = true
 		}
 		if d.HasChange("skip_byte_order_mark") {
 			v := d.Get("skip_byte_order_mark").(bool)
 			opts.Set.JSONSkipByteOrderMark = &v
+			runSet = true
 		}
 	case sdk.FileFormatTypeAvro:
 		if d.HasChange("compression") {
 			comp := sdk.AvroCompression(d.Get("compression").(string))
 			opts.Set.AvroCompression = &comp
+			runSet = true
 		}
 		if d.HasChange("trim_space") {
 			v := d.Get("trim_space").(bool)
 			opts.Set.AvroTrimSpace = &v
+			runSet = true
 		}
 		if d.HasChange("null_if") {
 			nullIf := []sdk.NullString{}
@@ -963,11 +1004,13 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error {
 				nullIf = append(nullIf, sdk.NullString{S: s.(string)})
 			}
 			opts.Set.AvroNullIf = &nullIf
+			runSet = true
 		}
 	case sdk.FileFormatTypeORC:
 		if d.HasChange("trim_space") {
 			v := d.Get("trim_space").(bool)
 			opts.Set.ORCTrimSpace = &v
+			runSet = true
 		}
 		if d.HasChange("null_if") {
 			nullIf := []sdk.NullString{}
@@ -980,19 +1023,23 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error {
 				nullIf = append(nullIf, sdk.NullString{S: s.(string)})
 			}
 			opts.Set.ORCNullIf = &nullIf
+			runSet = true
 		}
 	case sdk.FileFormatTypeParquet:
 		if d.HasChange("compression") {
 			comp := sdk.ParquetCompression(d.Get("compression").(string))
 			opts.Set.ParquetCompression = &comp
+			runSet = true
 		}
 		if d.HasChange("binary_as_text") {
 			v := d.Get("binary_as_text").(bool)
 			opts.Set.ParquetBinaryAsText = &v
+			runSet = true
 		}
 		if d.HasChange("trim_space") {
 			v := d.Get("trim_space").(bool)
 			opts.Set.ParquetTrimSpace = &v
+			runSet = true
 		}
 		if d.HasChange("null_if") {
 			nullIf := []sdk.NullString{}
@@ -1005,42 +1052,51 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error {
 				nullIf = append(nullIf, sdk.NullString{S: s.(string)})
 			}
 			opts.Set.ParquetNullIf = &nullIf
+			runSet = true
 		}
 	case sdk.FileFormatTypeXML:
 		if d.HasChange("compression") {
 			comp := sdk.XMLCompression(d.Get("compression").(string))
 			opts.Set.XMLCompression = &comp
+			runSet = true
 		}
 		if d.HasChange("ignore_utf8_errors") {
 			v := d.Get("ignore_utf8_errors").(bool)
 			opts.Set.XMLIgnoreUTF8Errors = &v
+			runSet = true
 		}
 		if d.HasChange("preserve_space") {
 			v := d.Get("preserve_space").(bool)
 			opts.Set.XMLPreserveSpace = &v
+			runSet = true
 		}
 		if d.HasChange("strip_outer_element") {
 			v := d.Get("strip_outer_element").(bool)
 			opts.Set.XMLStripOuterElement = &v
+			runSet = true
 		}
 		if d.HasChange("disable_snowflake_data") {
 			v := d.Get("disable_snowflake_data").(bool)
 			opts.Set.XMLDisableSnowflakeData = &v
+			runSet = true
 		}
 		if d.HasChange("disable_auto_convert") {
 			v := d.Get("disable_auto_convert").(bool)
 			opts.Set.XMLDisableAutoConvert = &v
+			runSet = true
 		}
 		if d.HasChange("skip_byte_order_mark") {
 			v := d.Get("skip_byte_order_mark").(bool)
 			opts.Set.XMLSkipByteOrderMark = &v
+			runSet = true
 		}
 	}

-	err = client.FileFormats.Alter(ctx, id, &opts)
-
-	if err != nil {
-		return err
+	if runSet {
+		err = client.FileFormats.Alter(ctx, id, &opts)
+		if err != nil {
+			return err
+		}
 	}

 	return ReadFileFormat(d, meta)
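On the skip_header change at the top of this file: d.GetOk reports ok == false both when an attribute is unset and when it equals its type's zero value, so a skip_header of 0 is simply never sent (presumably to avoid sending SKIP_HEADER alongside PARSE_HEADER = TRUE, which Snowflake appears to reject — a hedged reading, not a statement from this PR). A hypothetical test, not part of this change, sketching those semantics with the SDK's test helper:

package resources_test

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// Shows why GetOk, rather than Get, gates skip_header in CreateFileFormat.
func TestSkipHeaderGetOkSemantics(t *testing.T) {
	s := map[string]*schema.Schema{
		"skip_header": {Type: schema.TypeInt, Optional: true},
	}

	// Unset (or explicitly zero) skip_header: GetOk reports ok == false,
	// so the option is omitted from the CREATE statement.
	d := schema.TestResourceDataRaw(t, s, map[string]interface{}{})
	if _, ok := d.GetOk("skip_header"); ok {
		t.Fatal("expected ok == false for unset skip_header")
	}

	// Non-zero skip_header: GetOk reports ok == true and the value is sent.
	d = schema.TestResourceDataRaw(t, s, map[string]interface{}{"skip_header": 3})
	if v, ok := d.GetOk("skip_header"); !ok || v.(int) != 3 {
		t.Fatalf("expected ok == true and v == 3, got %v, %v", v, ok)
	}
}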
d.HasChange("escape_unenclosed_field") { v := d.Get("escape_unenclosed_field").(string) opts.Set.CSVEscapeUnenclosedField = &v + runSet = true } if d.HasChange("trim_space") { v := d.Get("trim_space").(bool) opts.Set.CSVTrimSpace = &v + runSet = true } if d.HasChange("field_optionally_enclosed_by") { v := d.Get("field_optionally_enclosed_by").(string) opts.Set.CSVFieldOptionallyEnclosedBy = &v + runSet = true } if d.HasChange("null_if") { nullIf := []sdk.NullString{} @@ -853,51 +871,63 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error { nullIf = append(nullIf, sdk.NullString{S: s.(string)}) } opts.Set.CSVNullIf = &nullIf + runSet = true } if d.HasChange("error_on_column_count_mismatch") { v := d.Get("error_on_column_count_mismatch").(bool) opts.Set.CSVErrorOnColumnCountMismatch = &v + runSet = true } if d.HasChange("replace_invalid_characters") { v := d.Get("replace_invalid_characters").(bool) opts.Set.CSVReplaceInvalidCharacters = &v + runSet = true } if d.HasChange("empty_field_as_null") { v := d.Get("empty_field_as_null").(bool) opts.Set.CSVEmptyFieldAsNull = &v + runSet = true } if d.HasChange("skip_byte_order_mark") { v := d.Get("skip_byte_order_mark").(bool) opts.Set.CSVSkipByteOrderMark = &v + runSet = true } if d.HasChange("encoding") { v := sdk.CSVEncoding(d.Get("encoding").(string)) opts.Set.CSVEncoding = &v + runSet = true } case sdk.FileFormatTypeJSON: if d.HasChange("compression") { comp := sdk.JSONCompression(d.Get("compression").(string)) opts.Set.JSONCompression = &comp + runSet = true } if d.HasChange("date_format") { v := d.Get("date_format").(string) opts.Set.JSONDateFormat = &v + runSet = true } if d.HasChange("time_format") { v := d.Get("time_format").(string) opts.Set.JSONTimeFormat = &v + runSet = true } if d.HasChange("timestamp_format") { v := d.Get("timestamp_format").(string) opts.Set.JSONTimestampFormat = &v + runSet = true } if d.HasChange("binary_format") { v := sdk.BinaryFormat(d.Get("binary_format").(string)) opts.Set.JSONBinaryFormat = &v + runSet = true } if d.HasChange("trim_space") { v := d.Get("trim_space").(bool) opts.Set.JSONTrimSpace = &v + runSet = true } if d.HasChange("null_if") { nullIf := []sdk.NullString{} @@ -910,47 +940,58 @@ func UpdateFileFormat(d *schema.ResourceData, meta interface{}) error { nullIf = append(nullIf, sdk.NullString{S: s.(string)}) } opts.Set.JSONNullIf = &nullIf + runSet = true } if d.HasChange("file_extension") { v := d.Get("file_extension").(string) opts.Set.JSONFileExtension = &v + runSet = true } if d.HasChange("enable_octal") { v := d.Get("enable_octal").(bool) opts.Set.JSONEnableOctal = &v + runSet = true } if d.HasChange("allow_duplicate") { v := d.Get("allow_duplicate").(bool) opts.Set.JSONAllowDuplicate = &v + runSet = true } if d.HasChange("strip_outer_array") { v := d.Get("strip_outer_array").(bool) opts.Set.JSONStripOuterArray = &v + runSet = true } if d.HasChange("strip_null_values") { v := d.Get("strip_null_values").(bool) opts.Set.JSONStripNullValues = &v + runSet = true } if d.HasChange("replace_invalid_characters") { v := d.Get("replace_invalid_characters").(bool) opts.Set.JSONReplaceInvalidCharacters = &v + runSet = true } if d.HasChange("ignore_utf8_errors") { v := d.Get("ignore_utf8_errors").(bool) opts.Set.JSONIgnoreUTF8Errors = &v + runSet = true } if d.HasChange("skip_byte_order_mark") { v := d.Get("skip_byte_order_mark").(bool) opts.Set.JSONSkipByteOrderMark = &v + runSet = true } case sdk.FileFormatTypeAvro: if d.HasChange("compression") { comp := 
diff --git a/pkg/resources/testdata/TestAcc_DynamicTable_issue2134/1/test.tf b/pkg/resources/testdata/TestAcc_DynamicTable_issue2134/1/test.tf
new file mode 100644
index 0000000000..ae737b1923
--- /dev/null
+++ b/pkg/resources/testdata/TestAcc_DynamicTable_issue2134/1/test.tf
@@ -0,0 +1,23 @@
+resource "snowflake_table" "t" {
+  database        = var.database
+  schema          = var.schema
+  name            = var.table_name
+  change_tracking = true
+  column {
+    name = "id"
+    type = "NUMBER(38,0)"
+  }
+}
+
+resource "snowflake_dynamic_table" "dt" {
+  depends_on = [snowflake_table.t]
+  name       = var.name
+  database   = var.database
+  schema     = var.schema
+  target_lag {
+    maximum_duration = "2 minutes"
+  }
+  warehouse = var.warehouse
+  query     = var.query
+  comment   = var.comment
+}
resource.TestCheckResourceAttr("snowflake_file_format.test", "file_extension", ".ssv"), - resource.TestCheckResourceAttr("snowflake_file_format.test", "skip_header", "1"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "parse_header", "true"), resource.TestCheckResourceAttr("snowflake_file_format.test", "skip_blank_lines", "true"), resource.TestCheckResourceAttr("snowflake_file_format.test", "date_format", "YYY-MM-DD"), resource.TestCheckResourceAttr("snowflake_file_format.test", "time_format", "HH24:MI"), @@ -363,6 +364,40 @@ func TestAcc_FileFormatXMLDefaults(t *testing.T) { }) } +// TestAcc_FileFormat_issue1947 proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1947 issue. +func TestAcc_FileFormat_issue1947(t *testing.T) { + name := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: nil, + Steps: []resource.TestStep{ + { + Config: fileFormatConfigFullDefaults(name, "XML", acc.TestDatabaseName, acc.TestSchemaName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_file_format.test", "name", name), + ), + }, + /* + * Before the fix this step resulted in + * Error: only one of Rename or Set can be set at once. + * which matches the issue description exactly + */ + { + // we set param which is not right for XML but this allows us to run update on terraform apply + Config: fileFormatConfigFullDefaultsWithAdditionalParam(name, "XML", acc.TestDatabaseName, acc.TestSchemaName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_file_format.test", "name", name), + ), + }, + }, + }) +} + func fileFormatConfigCSV(n string, databaseName string, schemaName string) string { return fmt.Sprintf(` resource "snowflake_file_format" "test" { @@ -374,7 +409,7 @@ resource "snowflake_file_format" "test" { record_delimiter = "\r" field_delimiter = ";" file_extension = ".ssv" - skip_header = 1 + parse_header = true skip_blank_lines = true date_format = "YYY-MM-DD" time_format = "HH24:MI" @@ -495,3 +530,15 @@ resource "snowflake_file_format" "test" { } `, n, databaseName, schemaName, formatType) } + +func fileFormatConfigFullDefaultsWithAdditionalParam(n string, formatType string, databaseName string, schemaName string) string { + return fmt.Sprintf(` +resource "snowflake_file_format" "test" { + name = "%v" + database = "%s" + schema = "%s" + format_type = "%s" + encoding = "UTF-16" +} +`, n, databaseName, schemaName, formatType) +} diff --git a/pkg/resources/testdata/TestAcc_DynamicTable_issue2134/1/test.tf b/pkg/resources/testdata/TestAcc_DynamicTable_issue2134/1/test.tf new file mode 100644 index 0000000000..ae737b1923 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_DynamicTable_issue2134/1/test.tf @@ -0,0 +1,23 @@ +resource "snowflake_table" "t" { + database = var.database + schema = var.schema + name = var.table_name + change_tracking = true + column { + name = "id" + type = "NUMBER(38,0)" + } +} + +resource "snowflake_dynamic_table" "dt" { + depends_on = [snowflake_table.t] + name = var.name + database = var.database + schema = var.schema + target_lag { + maximum_duration = "2 minutes" + } + warehouse = var.warehouse + query = var.query + comment = var.comment +} diff --git 
diff --git a/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/1/test.tf b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/1/test.tf
new file mode 100644
index 0000000000..d32631d2e2
--- /dev/null
+++ b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/1/test.tf
@@ -0,0 +1,16 @@
+resource "snowflake_schema" "other_schema" {
+  database = var.database
+  name     = var.other_schema
+  comment  = "Other schema"
+}
+
+resource "snowflake_table" "t" {
+  database        = var.database
+  schema          = var.schema
+  name            = var.table_name
+  change_tracking = true
+  column {
+    name = "id"
+    type = "NUMBER(38,0)"
+  }
+}
diff --git a/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/1/variables.tf b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/1/variables.tf
new file mode 100644
index 0000000000..e0f023624b
--- /dev/null
+++ b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/1/variables.tf
@@ -0,0 +1,31 @@
+variable "name" {
+  type = string
+}
+
+variable "database" {
+  type = string
+}
+
+variable "schema" {
+  type = string
+}
+
+variable "warehouse" {
+  type = string
+}
+
+variable "query" {
+  type = string
+}
+
+variable "comment" {
+  type = string
+}
+
+variable "table_name" {
+  type = string
+}
+
+variable "other_schema" {
+  type = string
+}
diff --git a/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/2/test.tf b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/2/test.tf
new file mode 100644
index 0000000000..6cd68399c6
--- /dev/null
+++ b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/2/test.tf
@@ -0,0 +1,29 @@
+resource "snowflake_schema" "other_schema" {
+  database = var.database
+  name     = var.other_schema
+  comment  = "Other schema"
+}
+
+resource "snowflake_table" "t" {
+  database        = var.database
+  schema          = var.schema
+  name            = var.table_name
+  change_tracking = true
+  column {
+    name = "id"
+    type = "NUMBER(38,0)"
+  }
+}
+
+resource "snowflake_dynamic_table" "dt" {
+  depends_on = [snowflake_table.t]
+  name       = var.name
+  database   = var.database
+  schema     = var.schema
+  target_lag {
+    maximum_duration = "2 minutes"
+  }
+  warehouse = var.warehouse
+  query     = var.query
+  comment   = var.comment
+}
diff --git a/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/2/variables.tf b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/2/variables.tf
new file mode 100644
index 0000000000..e0f023624b
--- /dev/null
+++ b/pkg/resources/testdata/TestAcc_DynamicTable_issue2173/2/variables.tf
@@ -0,0 +1,31 @@
+variable "name" {
+  type = string
+}
+
+variable "database" {
+  type = string
+}
+
+variable "schema" {
+  type = string
+}
+
+variable "warehouse" {
+  type = string
+}
+
+variable "query" {
+  type = string
+}
+
+variable "comment" {
+  type = string
+}
+
+variable "table_name" {
+  type = string
+}
+
+variable "other_schema" {
+  type = string
+}
diff --git a/pkg/resources/view.go b/pkg/resources/view.go
index 3007c48612..9574cc6624 100644
--- a/pkg/resources/view.go
+++ b/pkg/resources/view.go
@@ -74,24 +74,6 @@ var viewSchema = map[string]*schema.Schema{
 	"tag": tagReferenceSchema,
 }

-func normalizeQuery(str string) string {
-	return strings.TrimSpace(space.ReplaceAllString(str, " "))
-}
-
-// DiffSuppressStatement will suppress diffs between statements if they differ in only case or in
-// runs of whitespace (\s+ = \s). This is needed because the snowflake api does not faithfully
-// round-trip queries, so we cannot do a simple character-wise comparison to detect changes.
-//
-// Warnings: We will have false positives in cases where a change in case or run of whitespace is
-// semantically significant.
-//
-// If we can find a sql parser that can handle the snowflake dialect then we should switch to parsing
-// queries and either comparing ASTs or emitting a canonical serialization for comparison. I couldn't
-// find such a library.
-func DiffSuppressStatement(_, old, new string, _ *schema.ResourceData) bool {
-	return strings.EqualFold(normalizeQuery(old), normalizeQuery(new))
-}
-
 // View returns a pointer to the resource representing a view.
 func View() *schema.Resource {
 	return &schema.Resource{
diff --git a/pkg/sdk/dynamic_table_impl.go b/pkg/sdk/dynamic_table_impl.go
index 86d41c957b..bb7bd40e9e 100644
--- a/pkg/sdk/dynamic_table_impl.go
+++ b/pkg/sdk/dynamic_table_impl.go
@@ -47,7 +47,7 @@ func (v *dynamicTables) Show(ctx context.Context, request *ShowDynamicTableReque
 }

 func (v *dynamicTables) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*DynamicTable, error) {
-	request := NewShowDynamicTableRequest().WithLike(&Like{Pattern: String(id.Name())})
+	request := NewShowDynamicTableRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{Pattern: String(id.Name())})
 	dynamicTables, err := v.Show(ctx, request)
 	if err != nil {
 		return nil, err
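The ShowByID change above scopes the lookup to the dynamic table's own schema, so two dynamic tables with the same name in different schemas no longer shadow each other (the root cause of issue 2173). The request should now correspond to SHOW DYNAMIC TABLES LIKE '<name>' IN SCHEMA "<db>"."<schema>" rather than an unscoped SHOW DYNAMIC TABLES LIKE '<name>'. A usage sketch built only from functions that appear in this diff (the identifier names are hypothetical):

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

func main() {
	client, err := sdk.NewDefaultClient()
	if err != nil {
		log.Fatal(err)
	}

	// With the fix, this resolves MY_DT inside MY_DB.MY_SCHEMA only, even if
	// another dynamic table named MY_DT exists in a different schema.
	id := sdk.NewSchemaObjectIdentifier("MY_DB", "MY_SCHEMA", "MY_DT")
	dt, err := client.DynamicTables.ShowByID(context.Background(), id)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(dt.Name, dt.Warehouse, dt.TargetLag)
}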
diff --git a/pkg/sdk/testint/dynamic_table_integration_test.go b/pkg/sdk/testint/dynamic_table_integration_test.go
index b6efbd9d59..6a4df4bc52 100644
--- a/pkg/sdk/testint/dynamic_table_integration_test.go
+++ b/pkg/sdk/testint/dynamic_table_integration_test.go
@@ -38,6 +38,13 @@ func TestInt_DynamicTableCreateAndDrop(t *testing.T) {
 		require.Equal(t, name.Name(), entity.Name)
 		require.Equal(t, testWarehouse(t).ID().Name(), entity.Warehouse)
 		require.Equal(t, *targetLag.MaximumDuration, entity.TargetLag)
+
+		dynamicTableById, err := client.DynamicTables.ShowByID(ctx, name)
+		require.NoError(t, err)
+		require.NotNil(t, dynamicTableById)
+		require.Equal(t, name.Name(), dynamicTableById.Name)
+		require.Equal(t, testWarehouse(t).ID().Name(), dynamicTableById.Warehouse)
+		require.Equal(t, *targetLag.MaximumDuration, dynamicTableById.TargetLag)
 	})

 	t.Run("test complete with target lag", func(t *testing.T) {