From f70e40e82a7f22e86276ab55ddf2584882229ccd Mon Sep 17 00:00:00 2001
From: Jakub Michalak
Date: Wed, 24 Jul 2024 14:16:17 +0200
Subject: [PATCH] feat: Rework schema datasource (#2954)

Rework schema datasource:
- add acceptance tests
- add entry to the migration guide

## Test Plan
* [x] acceptance tests

## References
https://docs.snowflake.com/en/sql-reference/sql/show-schemas
---
 MIGRATION_GUIDE.md                            |  11 +-
 docs/data-sources/schemas.md                  | 379 +++++++++++++++++-
 .../snowflake_schemas/data-source.tf          |  86 +++-
 pkg/datasources/schemas.go                    | 230 +++++++++--
 pkg/datasources/schemas_acceptance_test.go    | 214 +++++++++-
 .../testdata/TestAcc_Schemas/in/test.tf       |  22 +
 .../testdata/TestAcc_Schemas/in/variables.tf  |  27 ++
 .../testdata/TestAcc_Schemas/like/test.tf     |  19 +
 .../TestAcc_Schemas/like/variables.tf         |  19 +
 .../testdata/TestAcc_Schemas/limit/test.tf    |  22 +
 .../TestAcc_Schemas/limit/variables.tf        |  23 ++
 .../TestAcc_Schemas/non_existing/test.tf      |  10 +
 .../TestAcc_Schemas/optionals_set/test.tf     |  27 ++
 .../optionals_set/variables.tf                |  11 +
 .../TestAcc_Schemas/optionals_unset/test.tf   |  18 +
 .../optionals_unset/variables.tf              |  11 +
 .../TestAcc_Schemas/starts_with/test.tf       |  19 +
 .../TestAcc_Schemas/starts_with/variables.tf  |  19 +
 18 files changed, 1096 insertions(+), 71 deletions(-)
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/in/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/in/variables.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/like/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/like/variables.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/limit/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/limit/variables.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/non_existing/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/optionals_set/variables.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/variables.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/starts_with/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_Schemas/starts_with/variables.tf

diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md
index 48ee0bee26..d4b588eca5 100644
--- a/MIGRATION_GUIDE.md
+++ b/MIGRATION_GUIDE.md
@@ -6,6 +6,16 @@ across different versions.
 
 ## v0.93.0 ➞ v0.94.0
 
+### *(breaking change)* refactored snowflake_schemas datasource
+Changes:
+- `database` has been removed; the database can now be specified inside the `in` field.
+- `like`, `in`, `starts_with`, and `limit` fields enable filtering.
+- The SHOW SCHEMAS output is enclosed in the `show_output` field inside `schemas`.
+- Added outputs from **DESC SCHEMA** and **SHOW PARAMETERS IN SCHEMA** (both can be turned off by declaring `with_describe = false` and `with_parameters = false`; **they're turned on by default**).
+  The additional parameters call **DESC SCHEMA** (with `with_describe` turned on) and **SHOW PARAMETERS IN SCHEMA** (with `with_parameters` turned on) **per schema** returned by **SHOW SCHEMAS**.
+  The outputs of both commands are held in the `schemas` entry, where the **DESC SCHEMA** output is saved in the `describe_output` field and the **SHOW PARAMETERS IN SCHEMA** output in the `parameters` field.
+  It's important to limit the records and calls to Snowflake to the minimum. That's why we recommend assessing which information you need from the data source and then providing strong filters and turning off additional fields for better plan performance.
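+
+  For example, a minimal before/after sketch of the migration (the `MYDB` database name is just a placeholder):
+  ```terraform
+  # before
+  data "snowflake_schemas" "example" {
+    database = "MYDB"
+  }
+
+  # after
+  data "snowflake_schemas" "example" {
+    in {
+      database = "MYDB"
+    }
+    # optionally skip the extra DESC SCHEMA and SHOW PARAMETERS IN SCHEMA calls
+    with_describe   = false
+    with_parameters = false
+  }
+  ```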
+
 ### *(new feature)* new snowflake_account_role resource
 Already existing `snowflake_role` was deprecated in favor of the new `snowflake_account_role`. The old resource got upgraded to
@@ -45,7 +55,6 @@ Added a new datasource enabling querying and filtering streamlits.
 Notes:
 - `like`, `in`, and `limit` fields enable streamlits filtering.
 - SHOW STREAMLITS output is enclosed in `show_output` field inside `streamlits`.
 - Output from **DESC STREAMLIT** (which can be turned off by declaring `with_describe = false`, **it's turned on by default**) is enclosed in `describe_output` field inside `streamlits`.
-  **DESC STREAMLIT** returns different properties based on the integration type. Consult the documentation to check which ones will be filled for which integration.
   The additional parameters call **DESC STREAMLIT** (with `with_describe` turned on) **per streamlit** returned by **SHOW STREAMLITS**.
   It's important to limit the records and calls to Snowflake to the minimum. That's why we recommend assessing which information you need from the data source and then providing strong filters and turning off additional fields for better plan performance.
diff --git a/docs/data-sources/schemas.md b/docs/data-sources/schemas.md
index 8abb7c1857..2823553a55 100644
--- a/docs/data-sources/schemas.md
+++ b/docs/data-sources/schemas.md
@@ -2,38 +2,401 @@
 page_title: "snowflake_schemas Data Source - terraform-provider-snowflake"
 subcategory: ""
 description: |-
-  
+  Datasource used to get details of filtered schemas. Filtering is aligned with the current possibilities for the SHOW SCHEMAS https://docs.snowflake.com/en/sql-reference/sql/show-schemas query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.
 ---
 
 # snowflake_schemas (Data Source)
 
-
+Datasource used to get details of filtered schemas. Filtering is aligned with the current possibilities for the [SHOW SCHEMAS](https://docs.snowflake.com/en/sql-reference/sql/show-schemas) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.
 ## Example Usage
 
 ```terraform
-data "snowflake_schemas" "current" {
-  database = "MYDB"
+# Simple usage
+data "snowflake_schemas" "simple" {
+}
+
+output "simple_output" {
+  value = data.snowflake_schemas.simple.schemas
+}
+
+# Filtering (like)
+data "snowflake_schemas" "like" {
+  like = "schema-name"
+}
+
+output "like_output" {
+  value = data.snowflake_schemas.like.schemas
+}
+
+# Filtering by prefix (like)
+data "snowflake_schemas" "like_prefix" {
+  like = "prefix%"
+}
+
+output "like_prefix_output" {
+  value = data.snowflake_schemas.like_prefix.schemas
+}
+
+# Filtering (limit)
+data "snowflake_schemas" "limit" {
+  limit {
+    rows = 10
+    from = "prefix-"
+  }
+}
+
+output "limit_output" {
+  value = data.snowflake_schemas.limit.schemas
+}
+
+# Filtering (in)
+data "snowflake_schemas" "in" {
+  in {
+    database = "database"
+  }
+}
+
+output "in_output" {
+  value = data.snowflake_schemas.in.schemas
+}
+
+# Without additional data (to limit the number of calls made for every found schema)
+data "snowflake_schemas" "only_show" {
+  # with_describe is turned on by default and it calls DESCRIBE SCHEMA for every schema found and attaches its output to schemas.*.describe_output field
+  with_describe = false
+  # with_parameters is turned on by default and it calls SHOW PARAMETERS IN SCHEMA for every schema found and attaches its output to schemas.*.parameters field
+  with_parameters = false
+}
+
+output "only_show_output" {
+  value = data.snowflake_schemas.only_show.schemas
+}
+
+# Ensure the number of schemas is at least one (with the use of a postcondition)
+data "snowflake_schemas" "assert_with_postcondition" {
+  like = "schema-name%"
+  lifecycle {
+    postcondition {
+      condition     = length(self.schemas) > 0
+      error_message = "there should be at least one schema"
+    }
+  }
+}
+
+# Ensure the number of schemas is exactly one (with the use of a check block)
+check "schema_check" {
+  data "snowflake_schemas" "assert_with_check_block" {
+    like = "schema-name"
+  }
+
+  assert {
+    condition     = length(data.snowflake_schemas.assert_with_check_block.schemas) == 1
+    error_message = "schemas filtered by '${data.snowflake_schemas.assert_with_check_block.like}' returned ${length(data.snowflake_schemas.assert_with_check_block.schemas)} schemas where one was expected"
+  }
 }
 ```
 
 ## Schema
 
-### Required
+### Optional
 
-- `database` (String) The database from which to return the schemas from.
+- `in` (Block List, Max: 1) IN clause to filter the list of schemas (see [below for nested schema](#nestedblock--in))
+- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`).
+- `limit` (Block List, Max: 1) Limits the number of rows returned. If the `limit.from` is set, then the limit will start from the first element matched by the expression. The expression is only used to match with the first element; later elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`. (see [below for nested schema](#nestedblock--limit))
+- `starts_with` (String) Filters the output with **case-sensitive** characters indicating the beginning of the object name.
+- `with_describe` (Boolean) Runs DESC SCHEMA for each schema returned by SHOW SCHEMAS. The output of describe is saved to the describe_output field. By default this value is set to true.
+- `with_parameters` (Boolean) Runs SHOW PARAMETERS IN SCHEMA for each schema returned by SHOW SCHEMAS. The output of the command is saved to the parameters field. By default this value is set to true.
 
 ### Read-Only
 
 - `id` (String) The ID of this resource.
-- `schemas` (List of Object) The schemas in the database (see [below for nested schema](#nestedatt--schemas))
+- `schemas` (List of Object) Holds the aggregated output of all SCHEMA details queries. (see [below for nested schema](#nestedatt--schemas))
+
+
+### Nested Schema for `in`
+
+Optional:
+
+- `account` (Boolean) Returns records for the entire account.
+- `application` (String) Returns records for the specified application.
+- `application_package` (String) Returns records for the specified application package.
+- `database` (String) Returns records for the current database in use or for a specified database (db_name).
+
+
+
+### Nested Schema for `limit`
+
+Required:
+
+- `rows` (Number) The maximum number of rows to return.
+
+Optional:
+
+- `from` (String) Specifies a **case-sensitive** pattern that is used to match the object name. After the first match, the limit on the number of rows will be applied.
+
+
+
+### Nested Schema for `schemas`
+
+Read-Only:
+
+- `describe_output` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--describe_output))
+- `parameters` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters))
+- `show_output` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--show_output))
+
+
+### Nested Schema for `schemas.describe_output`
+
+Read-Only:
+
+- `created_on` (String)
+- `kind` (String)
+- `name` (String)
+
+
+
+### Nested Schema for `schemas.parameters`
+
+Read-Only:
+
+- `catalog` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--catalog))
+- `data_retention_time_in_days` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--data_retention_time_in_days))
+- `default_ddl_collation` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--default_ddl_collation))
+- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--enable_console_output))
+- `external_volume` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--external_volume))
+- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--log_level))
+- `max_data_extension_time_in_days` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--max_data_extension_time_in_days))
+- `pipe_execution_paused` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--pipe_execution_paused))
+- `quoted_identifiers_ignore_case` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--quoted_identifiers_ignore_case))
+- `replace_invalid_characters` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--replace_invalid_characters))
+- `storage_serialization_policy` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--storage_serialization_policy))
+- `suspend_task_after_num_failures` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--suspend_task_after_num_failures))
+- `task_auto_retry_attempts` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--task_auto_retry_attempts))
+- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--trace_level))
+- 
`user_task_managed_initial_warehouse_size` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--user_task_managed_initial_warehouse_size)) +- `user_task_minimum_trigger_interval_in_seconds` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--user_task_minimum_trigger_interval_in_seconds)) +- `user_task_timeout_ms` (List of Object) (see [below for nested schema](#nestedobjatt--schemas--parameters--user_task_timeout_ms)) + + +### Nested Schema for `schemas.parameters.catalog` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.data_retention_time_in_days` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.default_ddl_collation` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.external_volume` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.max_data_extension_time_in_days` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.pipe_execution_paused` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.quoted_identifiers_ignore_case` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.replace_invalid_characters` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.storage_serialization_policy` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.suspend_task_after_num_failures` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.task_auto_retry_attempts` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.user_task_managed_initial_warehouse_size` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `schemas.parameters.user_task_minimum_trigger_interval_in_seconds` + +Read-Only: + +- `default` (String) 
+- `description` (String)
+- `key` (String)
+- `level` (String)
+- `value` (String)
+
+
+
+### Nested Schema for `schemas.parameters.user_task_timeout_ms`
+
+Read-Only:
+
+- `default` (String)
+- `description` (String)
+- `key` (String)
+- `level` (String)
+- `value` (String)
+
+
+
+
+### Nested Schema for `schemas.show_output`
+
+Read-Only:
+
 - `comment` (String)
-- `database` (String)
+- `created_on` (String)
+- `database_name` (String)
+- `dropped_on` (String)
+- `is_current` (Boolean)
+- `is_default` (Boolean)
 - `name` (String)
+- `options` (String)
+- `owner` (String)
+- `owner_role_type` (String)
+- `retention_time` (String)
diff --git a/examples/data-sources/snowflake_schemas/data-source.tf b/examples/data-sources/snowflake_schemas/data-source.tf
index 7d84b77fde..d7925951ef 100644
--- a/examples/data-sources/snowflake_schemas/data-source.tf
+++ b/examples/data-sources/snowflake_schemas/data-source.tf
@@ -1,3 +1,83 @@
-data "snowflake_schemas" "current" {
-  database = "MYDB"
-}
\ No newline at end of file
+# Simple usage
+data "snowflake_schemas" "simple" {
+}
+
+output "simple_output" {
+  value = data.snowflake_schemas.simple.schemas
+}
+
+# Filtering (like)
+data "snowflake_schemas" "like" {
+  like = "schema-name"
+}
+
+output "like_output" {
+  value = data.snowflake_schemas.like.schemas
+}
+
+# Filtering by prefix (like)
+data "snowflake_schemas" "like_prefix" {
+  like = "prefix%"
+}
+
+output "like_prefix_output" {
+  value = data.snowflake_schemas.like_prefix.schemas
+}
+
+# Filtering (limit)
+data "snowflake_schemas" "limit" {
+  limit {
+    rows = 10
+    from = "prefix-"
+  }
+}
+
+output "limit_output" {
+  value = data.snowflake_schemas.limit.schemas
+}
+
+# Filtering (in)
+data "snowflake_schemas" "in" {
+  in {
+    database = "database"
+  }
+}
+
+output "in_output" {
+  value = data.snowflake_schemas.in.schemas
+}
+
+# Without additional data (to limit the number of calls made for every found schema)
+data "snowflake_schemas" "only_show" {
+  # with_describe is turned on by default and it calls DESCRIBE SCHEMA for every schema found and attaches its output to schemas.*.describe_output field
+  with_describe = false
+  # with_parameters is turned on by default and it calls SHOW PARAMETERS IN SCHEMA for every schema found and attaches its output to schemas.*.parameters field
+  with_parameters = false
+}
+
+output "only_show_output" {
+  value = data.snowflake_schemas.only_show.schemas
+}
+
+# Ensure the number of schemas is at least one (with the use of a postcondition)
+data "snowflake_schemas" "assert_with_postcondition" {
+  like = "schema-name%"
+  lifecycle {
+    postcondition {
+      condition     = length(self.schemas) > 0
+      error_message = "there should be at least one schema"
+    }
+  }
+}
+
+# Ensure the number of schemas is exactly one (with the use of a check block)
+check "schema_check" {
+  data "snowflake_schemas" "assert_with_check_block" {
+    like = "schema-name"
+  }
+
+  assert {
+    condition     = length(data.snowflake_schemas.assert_with_check_block.schemas) == 1
+    error_message = "schemas filtered by '${data.snowflake_schemas.assert_with_check_block.like}' returned ${length(data.snowflake_schemas.assert_with_check_block.schemas)} schemas where one was expected"
+  }
+}
diff --git a/pkg/datasources/schemas.go b/pkg/datasources/schemas.go
index 6ddfd2b0d2..a697ba19dd 100644
--- a/pkg/datasources/schemas.go
+++ b/pkg/datasources/schemas.go
@@ -2,38 +2,123 @@ package datasources
 
 import (
 	"context"
-	"log"
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + resourceschemas "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) var schemasSchema = map[string]*schema.Schema{ - "database": { + "with_describe": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: "Runs DESC SCHEMA for each schema returned by SHOW SCHEMAS. The output of describe is saved to the description field. By default this value is set to true.", + }, + "with_parameters": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: "Runs SHOW PARAMETERS FOR SCHEMA for each schema returned by SHOW SCHEMAS. The output of describe is saved to the parameters field as a map. By default this value is set to true.", + }, + "like": { + Type: schema.TypeString, + Optional: true, + Description: "Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`).", + }, + "starts_with": { Type: schema.TypeString, - Required: true, - Description: "The database from which to return the schemas from.", + Optional: true, + Description: "Filters the output with **case-sensitive** characters indicating the beginning of the object name.", + }, + "limit": { + Type: schema.TypeList, + Optional: true, + Description: "Limits the number of rows returned. If the `limit.from` is set, then the limit wll start from the first element matched by the expression. The expression is only used to match with the first element, later on the elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`.", + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "rows": { + Type: schema.TypeInt, + Required: true, + Description: "The maximum number of rows to return.", + }, + "from": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a **case-sensitive** pattern that is used to match object name. 
+				},
+			},
+		},
+	},
+	"in": {
+		Type:        schema.TypeList,
+		Optional:    true,
+		Description: "IN clause to filter the list of schemas",
+		MaxItems:    1,
+		Elem: &schema.Resource{
+			Schema: map[string]*schema.Schema{
+				"account": {
+					Type:         schema.TypeBool,
+					Optional:     true,
+					Description:  "Returns records for the entire account.",
+					ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.application", "in.0.application_package"},
+				},
+				"database": {
+					Type:         schema.TypeString,
+					Optional:     true,
+					Description:  "Returns records for the current database in use or for a specified database (db_name).",
+					ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.application", "in.0.application_package"},
+				},
+				"application": {
+					Type:         schema.TypeString,
+					Optional:     true,
+					Description:  "Returns records for the specified application.",
+					ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.application", "in.0.application_package"},
+				},
+				"application_package": {
+					Type:         schema.TypeString,
+					Optional:     true,
+					Description:  "Returns records for the specified application package.",
+					ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.application", "in.0.application_package"},
+				},
+			},
+		},
+	},
 	"schemas": {
 		Type:        schema.TypeList,
 		Computed:    true,
-		Description: "The schemas in the database",
+		Description: "Holds the aggregated output of all SCHEMA details queries.",
 		Elem: &schema.Resource{
 			Schema: map[string]*schema.Schema{
-				"name": {
-					Type:     schema.TypeString,
-					Computed: true,
+				resources.ShowOutputAttributeName: {
+					Type:        schema.TypeList,
+					Computed:    true,
+					Description: "Holds the output of SHOW SCHEMAS.",
+					Elem: &schema.Resource{
+						Schema: schemas.ShowSchemaSchema,
+					},
 				},
-				"database": {
-					Type:     schema.TypeString,
-					Computed: true,
+				resources.DescribeOutputAttributeName: {
+					Type:        schema.TypeList,
+					Computed:    true,
+					Description: "Holds the output of DESCRIBE SCHEMA.",
+					Elem: &schema.Resource{
+						Schema: schemas.SchemaDescribeSchema,
+					},
 				},
-				"comment": {
-					Type:     schema.TypeString,
-					Optional: true,
-					Computed: true,
+				resources.ParametersAttributeName: {
+					Type:        schema.TypeList,
+					Computed:    true,
+					Description: "Holds the output of SHOW PARAMETERS IN SCHEMA.",
+					Elem: &schema.Resource{
+						Schema: schemas.ShowSchemaParametersSchema,
+					},
 				},
 			},
 		},
@@ -42,38 +127,105 @@ var schemasSchema = map[string]*schema.Schema{
 
 func Schemas() *schema.Resource {
 	return &schema.Resource{
-		Read:   ReadSchemas,
-		Schema: schemasSchema,
+		ReadContext: ReadSchemas,
+		Schema:      schemasSchema,
+		Description: "Datasource used to get details of filtered schemas. Filtering is aligned with the current possibilities for the [SHOW SCHEMAS](https://docs.snowflake.com/en/sql-reference/sql/show-schemas) query. 
The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.", } } -func ReadSchemas(d *schema.ResourceData, meta interface{}) error { +func ReadSchemas(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - ctx := context.Background() - databaseName := d.Get("database").(string) - databaseID := sdk.NewAccountObjectIdentifier(databaseName) - - currentSchemas, err := client.Schemas.Show(ctx, &sdk.ShowSchemaOptions{ - In: &sdk.SchemaIn{ - Database: sdk.Bool(true), - Name: databaseID, - }, - }) + var opts sdk.ShowSchemaOptions + + if likePattern, ok := d.GetOk("like"); ok { + opts.Like = &sdk.Like{ + Pattern: sdk.String(likePattern.(string)), + } + } + + if startsWith, ok := d.GetOk("starts_with"); ok { + opts.StartsWith = sdk.String(startsWith.(string)) + } + + if limit, ok := d.GetOk("limit"); ok && len(limit.([]any)) == 1 { + limitMap := limit.([]any)[0].(map[string]any) + + rows := limitMap["rows"].(int) + opts.LimitFrom = &sdk.LimitFrom{ + Rows: &rows, + } + + if from, ok := limitMap["from"].(string); ok { + opts.LimitFrom.From = &from + } + } + + if v, ok := d.GetOk("in"); ok { + in := v.([]interface{})[0].(map[string]interface{}) + if v, ok := in["account"]; ok { + if account := v.(bool); account { + opts.In = &sdk.SchemaIn{Account: sdk.Bool(account)} + } + } + if v, ok := in["database"]; ok { + if database := v.(string); database != "" { + opts.In = &sdk.SchemaIn{Name: sdk.NewAccountObjectIdentifier(database), Database: sdk.Pointer(true)} + } + } + if v, ok := in["application"]; ok { + if application := v.(string); application != "" { + opts.In = &sdk.SchemaIn{Name: sdk.NewAccountObjectIdentifier(application), Application: sdk.Pointer(true)} + } + } + if v, ok := in["application_package"]; ok { + if applicationPackage := v.(string); applicationPackage != "" { + opts.In = &sdk.SchemaIn{Name: sdk.NewAccountObjectIdentifier(applicationPackage), ApplicationPackage: sdk.Pointer(true)} + } + } + } + + schemas, err := client.Schemas.Show(ctx, &opts) if err != nil { - log.Printf("[DEBUG] unable to show schemas in database (%s)", databaseName) - d.SetId("") - return nil + return diag.FromErr(err) } + d.SetId("schemas_read") + + flattenedSchemas := make([]map[string]any, len(schemas)) + + for i, schema := range schemas { + schema := schema + var schemaDescription []map[string]any + if d.Get("with_describe").(bool) { + describeResult, err := client.Schemas.Describe(ctx, schema.ID()) + if err != nil { + return diag.FromErr(err) + } + schemaDescription = resourceschemas.SchemaDescriptionToSchema(describeResult) + } + + var schemaParameters []map[string]any + if d.Get("with_parameters").(bool) { + parameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Schema: schema.ID(), + }, + }) + if err != nil { + return diag.FromErr(err) + } + schemaParameters = []map[string]any{resourceschemas.SchemaParametersToSchema(parameters)} + } - schemas := make([]map[string]any, len(currentSchemas)) - for i, cs := range currentSchemas { - schemas[i] = map[string]any{ - "name": cs.Name, - "database": cs.DatabaseName, - "comment": cs.Comment, + flattenedSchemas[i] = map[string]any{ + resources.ShowOutputAttributeName: []map[string]any{resourceschemas.SchemaToSchema(&schema)}, + resources.DescribeOutputAttributeName: schemaDescription, + resources.ParametersAttributeName: schemaParameters, } } - d.SetId(databaseName) - return d.Set("schemas", schemas) + err = 
d.Set("schemas", flattenedSchemas) + if err != nil { + return diag.FromErr(err) + } + return nil } diff --git a/pkg/datasources/schemas_acceptance_test.go b/pkg/datasources/schemas_acceptance_test.go index ec72e5b06b..0aba2b49b7 100644 --- a/pkg/datasources/schemas_acceptance_test.go +++ b/pkg/datasources/schemas_acceptance_test.go @@ -2,51 +2,225 @@ package datasources_test import ( "fmt" + "maps" + "regexp" "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -func TestAcc_Schemas(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() +func TestAcc_Schemas_Complete(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + id := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + databaseId := acc.TestClient().Ids.DatabaseId() + + configVariables := config.Variables{ + "name": config.StringVariable(id.Name()), + "comment": config.StringVariable("foo"), + "database": config.StringVariable(databaseId.Name()), + } + resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.Schema), Steps: []resource.TestStep{ { - Config: schemas(databaseName, schemaName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.snowflake_schemas.s", "database", databaseName), - resource.TestCheckResourceAttrSet("data.snowflake_schemas.s", "schemas.#"), - resource.TestCheckResourceAttr("data.snowflake_schemas.s", "schemas.#", "3"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schemas/optionals_set"), + ConfigVariables: configVariables, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.#", "1"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.created_on"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.name", id.Name()), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.is_default", "false"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.is_current"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.database_name", databaseId.Name()), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.owner"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.comment", "foo"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.options", "TRANSIENT, MANAGED ACCESS"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.retention_time"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.owner_role_type"), + + 
resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.parameters.#", "1"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.data_retention_time_in_days.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.max_data_extension_time_in_days.0.value"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.parameters.0.external_volume.0.value", ""), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.parameters.0.catalog.0.value", ""), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.replace_invalid_characters.0.value"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.parameters.0.default_ddl_collation.0.value", ""), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.storage_serialization_policy.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.log_level.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.trace_level.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.suspend_task_after_num_failures.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.task_auto_retry_attempts.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.user_task_managed_initial_warehouse_size.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.user_task_minimum_trigger_interval_in_seconds.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.quoted_identifiers_ignore_case.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.enable_console_output.0.value"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.parameters.0.pipe_execution_paused.0.value"), + + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.describe_output.#", "1"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.describe_output.0.created_on"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.describe_output.0.name"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.describe_output.0.kind", "TABLE"), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schemas/optionals_unset"), + ConfigVariables: configVariables, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.#", "1"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.created_on"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.name", id.Name()), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.is_default", "false"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.is_current"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.database_name", databaseId.Name()), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.owner"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", 
"schemas.0.show_output.0.comment", "foo"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.show_output.0.options", "TRANSIENT, MANAGED ACCESS"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.retention_time"), + resource.TestCheckResourceAttrSet("data.snowflake_schemas.test", "schemas.0.show_output.0.owner_role_type"), + + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.describe_output.#", "0"), + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.0.parameters.#", "0"), ), }, }, }) } -func schemas(databaseName string, schemaName string) string { - return fmt.Sprintf(` +func TestAcc_Schemas_Filtering(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + prefix := random.AlphaN(4) + idOne := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix) + idTwo := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix) + idThree := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.DatabaseId() - resource snowflake_database "d" { - name = "%v" + database2, database2Cleanup := acc.TestClient().Database.CreateDatabase(t) + t.Cleanup(database2Cleanup) + + commonVariables := config.Variables{ + "name_1": config.StringVariable(idOne.Name()), + "name_2": config.StringVariable(idTwo.Name()), + "name_3": config.StringVariable(idThree.Name()), + "database": config.StringVariable(databaseId.Name()), } - resource snowflake_schema "s"{ - name = "%v" - database = snowflake_database.d.name + likeConfig := config.Variables{ + "like": config.StringVariable(idOne.Name()), } + maps.Copy(likeConfig, commonVariables) - data snowflake_schemas "s" { - database = snowflake_schema.s.database - depends_on = [snowflake_schema.s] + startsWithConfig := config.Variables{ + "starts_with": config.StringVariable(prefix), } - `, databaseName, schemaName) + maps.Copy(startsWithConfig, commonVariables) + + limitConfig := config.Variables{ + "rows": config.IntegerVariable(1), + "from": config.StringVariable(prefix), + } + maps.Copy(limitConfig, commonVariables) + + inConfig := config.Variables{ + "in": config.StringVariable(acc.TestDatabaseName), + "database_1": config.StringVariable(databaseId.Name()), + "database_2": config.StringVariable(database2.ID().Name()), + "starts_with": config.StringVariable(prefix), + } + maps.Copy(inConfig, commonVariables) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Schema), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schemas/like"), + ConfigVariables: likeConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.#", "1"), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schemas/starts_with"), + ConfigVariables: startsWithConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.#", "2"), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schemas/limit"), + ConfigVariables: limitConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.#", "1"), + ), + }, + { + 
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schemas/in"),
+				ConfigVariables: inConfig,
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr("data.snowflake_schemas.test", "schemas.#", "1"),
+				),
+			},
+		},
+	})
+}
+
+func TestAcc_Schemas_BadCombination(t *testing.T) {
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		PreCheck:                 func() { acc.TestAccPreCheck(t) },
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
+		CheckDestroy: nil,
+		Steps: []resource.TestStep{
+			{
+				Config:      schemasDatasourceConfigMultipleInValues(),
+				ExpectError: regexp.MustCompile("Invalid combination of arguments"),
+			},
+		},
+	})
+}
+
+func TestAcc_Schemas_SchemaNotFound_WithPostConditions(t *testing.T) {
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
+		Steps: []resource.TestStep{
+			{
+				ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schemas/non_existing"),
+				ExpectError:     regexp.MustCompile("there should be at least one schema"),
+			},
+		},
+	})
+}
+
+func schemasDatasourceConfigMultipleInValues() string {
+	return fmt.Sprintf(`
+data "snowflake_schemas" "test" {
+  in {
+    database            = "%s"
+    application         = "foo"
+    application_package = "bar"
+  }
+}
+`, acc.TestDatabaseName)
+}
diff --git a/pkg/datasources/testdata/TestAcc_Schemas/in/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/in/test.tf
new file mode 100644
index 0000000000..cdced966d0
--- /dev/null
+++ b/pkg/datasources/testdata/TestAcc_Schemas/in/test.tf
@@ -0,0 +1,22 @@
+resource "snowflake_schema" "test_1" {
+  name     = var.name_1
+  database = var.database_1
+}
+
+resource "snowflake_schema" "test_2" {
+  name     = var.name_2
+  database = var.database_2
+}
+
+resource "snowflake_schema" "test_3" {
+  name     = var.name_3
+  database = var.database_2
+}
+
+data "snowflake_schemas" "test" {
+  depends_on  = [snowflake_schema.test_1, snowflake_schema.test_2, snowflake_schema.test_3]
+  in {
+    database = var.in
+  }
+  starts_with = var.starts_with
+}
diff --git a/pkg/datasources/testdata/TestAcc_Schemas/in/variables.tf b/pkg/datasources/testdata/TestAcc_Schemas/in/variables.tf
new file mode 100644
index 0000000000..c70e5783d6
--- /dev/null
+++ b/pkg/datasources/testdata/TestAcc_Schemas/in/variables.tf
@@ -0,0 +1,27 @@
+variable "name_1" {
+  type = string
+}
+
+variable "name_2" {
+  type = string
+}
+
+variable "name_3" {
+  type = string
+}
+
+variable "database_1" {
+  type = string
+}
+
+variable "database_2" {
+  type = string
+}
+
+variable "in" {
+  type = string
+}
+
+variable "starts_with" {
+  type = string
+}
diff --git a/pkg/datasources/testdata/TestAcc_Schemas/like/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/like/test.tf
new file mode 100644
index 0000000000..957cfa5182
--- /dev/null
+++ b/pkg/datasources/testdata/TestAcc_Schemas/like/test.tf
@@ -0,0 +1,19 @@
+resource "snowflake_schema" "test_1" {
+  name     = var.name_1
+  database = var.database
+}
+
+resource "snowflake_schema" "test_2" {
+  name     = var.name_2
+  database = var.database
+}
+
+resource "snowflake_schema" "test_3" {
+  name     = var.name_3
+  database = var.database
+}
+
+data "snowflake_schemas" "test" {
+  depends_on = [snowflake_schema.test_1, snowflake_schema.test_2, snowflake_schema.test_3]
+  like       = var.like
+}
diff --git a/pkg/datasources/testdata/TestAcc_Schemas/like/variables.tf 
b/pkg/datasources/testdata/TestAcc_Schemas/like/variables.tf new file mode 100644 index 0000000000..3e39c7b232 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/like/variables.tf @@ -0,0 +1,19 @@ +variable "name_1" { + type = string +} + +variable "name_2" { + type = string +} + +variable "name_3" { + type = string +} + +variable "database" { + type = string +} + +variable "like" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/limit/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/limit/test.tf new file mode 100644 index 0000000000..2e65cba364 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/limit/test.tf @@ -0,0 +1,22 @@ +resource "snowflake_schema" "test_1" { + name = var.name_1 + database = var.database +} + +resource "snowflake_schema" "test_2" { + name = var.name_2 + database = var.database +} + +resource "snowflake_schema" "test_3" { + name = var.name_3 + database = var.database +} + +data "snowflake_schemas" "test" { + depends_on = [snowflake_schema.test_1, snowflake_schema.test_2, snowflake_schema.test_3] + limit { + rows = var.rows + from = var.from + } +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/limit/variables.tf b/pkg/datasources/testdata/TestAcc_Schemas/limit/variables.tf new file mode 100644 index 0000000000..3139f4bcb8 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/limit/variables.tf @@ -0,0 +1,23 @@ +variable "name_1" { + type = string +} + +variable "name_2" { + type = string +} + +variable "name_3" { + type = string +} + +variable "database" { + type = string +} + +variable "rows" { + type = number +} + +variable "from" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/non_existing/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/non_existing/test.tf new file mode 100644 index 0000000000..2dbad5d96f --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/non_existing/test.tf @@ -0,0 +1,10 @@ +data "snowflake_schemas" "test" { + like = "non-existing-schema" + + lifecycle { + postcondition { + condition = length(self.schemas) > 0 + error_message = "there should be at least one schema" + } + } +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf new file mode 100644 index 0000000000..98eb66450b --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf @@ -0,0 +1,27 @@ +resource "snowflake_schema" "test" { + name = var.name + database = var.database + comment = var.comment + is_transient = true + is_managed = true +} + +resource "snowflake_table" "test" { + database = var.database + schema = snowflake_schema.test.name + name = "table" + + column { + name = "id" + type = "int" + } +} + +data "snowflake_schemas" "test" { + depends_on = [snowflake_table.test] + like = var.name + starts_with = var.name + limit { + rows = 1 + } +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/variables.tf b/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/variables.tf new file mode 100644 index 0000000000..0c8231993a --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf new file mode 100644 index 0000000000..0bbc4a0850 --- /dev/null 
+++ b/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf @@ -0,0 +1,18 @@ +resource "snowflake_schema" "test" { + name = var.name + database = var.database + comment = var.comment + is_transient = true + is_managed = true +} + +data "snowflake_schemas" "test" { + with_describe = false + with_parameters = false + depends_on = [snowflake_schema.test] + like = var.name + starts_with = var.name + limit { + rows = 1 + } +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/variables.tf b/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/variables.tf new file mode 100644 index 0000000000..0c8231993a --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/starts_with/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/starts_with/test.tf new file mode 100644 index 0000000000..5ce46b2334 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/starts_with/test.tf @@ -0,0 +1,19 @@ +resource "snowflake_schema" "test_1" { + name = var.name_1 + database = var.database +} + +resource "snowflake_schema" "test_2" { + name = var.name_2 + database = var.database +} + +resource "snowflake_schema" "test_3" { + name = var.name_3 + database = var.database +} + +data "snowflake_schemas" "test" { + depends_on = [snowflake_schema.test_1, snowflake_schema.test_2, snowflake_schema.test_3] + starts_with = var.starts_with +} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/starts_with/variables.tf b/pkg/datasources/testdata/TestAcc_Schemas/starts_with/variables.tf new file mode 100644 index 0000000000..b729f25c6d --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Schemas/starts_with/variables.tf @@ -0,0 +1,19 @@ +variable "name_1" { + type = string +} + +variable "name_2" { + type = string +} + +variable "name_3" { + type = string +} + +variable "database" { + type = string +} + +variable "starts_with" { + type = string +}